Merged revisions 209304,209307,209332,209338-209339,209343,209346,209351,209354,20936...
[official-gcc.git] / gcc-4_9 / gcc / fold-const.c
blob0dccbf70ad7a6709089b3ce6664cc51b942c4f12
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  Set by callers (e.g. the C front end) around constant
   initializer folding; read by folders that may be more aggressive
   in that context.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is: bit 0 = LT, bit 1 = EQ, bit 2 = GT, bit 3 = UNORDERED,
   so e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (3) and
   COMPCODE_UNLT == COMPCODE_UNORD | COMPCODE_LT (9).  AND/OR of two
   comparisons of the same operands then maps to bitwise AND/OR of
   their codes.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
/* Forward declarations of the file-local helpers defined below.  */
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149 Otherwise, return LOC. */
151 static location_t
152 expr_location_or (tree t, location_t loc)
154 location_t tloc = EXPR_LOCATION (t);
155 return tloc == UNKNOWN_LOCATION ? loc : tloc;
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  /* Only act when X can carry a location, the location would actually
     change, and X is not one of the node kinds whose identity matters
     and therefore must not be duplicated.  */
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      /* Unshare first so other users of the original X are unaffected.  */
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  ARG1 and ARG2 must be INTEGER_CSTs;
   the result has the type of ARG1.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  /* Only an exact division yields a result.  */
  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  Acts as a nesting counter, incremented/decremented by the
   defer/undefer functions below.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
226 void
227 fold_defer_overflow_warnings (void)
229 ++fold_deferring_overflow_warnings;
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      /* Still nested: just tighten the deferred level if CODE is
	 stricter (smaller), and keep deferring.  */
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  /* Outermost undefer: consume the pending warning.  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Respect a no-warning flag on the associated statement.  */
  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  Convenience wrapper around fold_undefer_overflow_warnings
   with ISSUE == false.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
291 /* Whether we are deferring overflow warnings. */
293 bool
294 fold_deferring_overflow_warnings_p (void)
296 return fold_deferring_overflow_warnings > 0;
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  If warnings are being deferred, record GMSGID
   (kept at the strictest, i.e. smallest, level WC seen); otherwise emit
   the warning immediately when the level warrants it.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      /* Keep only the first/strictest deferred warning.  */
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    /* The rounding-mode-sensitive functions are only odd when the
       rounding mode cannot change the result.  */
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  T must be an INTEGER_CST; returns false for unsigned
   types, and otherwise true unless T is the most negative value of
   its precision (whose negation is unrepresentable).  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Wide constant: if any low bit is set it cannot be the
	 minimum value, so negation is safe; otherwise examine the
	 high word against the sign bit.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  /* Only the pattern 100...0 (the minimum value) overflows on negation.  */
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* ~X negates to X + 1, which is only safe with wrapping overflow.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Otherwise every element must itself be negatable.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* A product or quotient is negated by negating either factor.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count is exactly precision - 1, i.e.
	     the shift extracts the sign bit.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Accept the negated constant unless negation introduced a new
	 overflow and the type traps on overflow.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	/* Only rebuild the complex constant if both parts folded to
	   constants of matching kind.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	/* Negate element-wise; give up if any element does not fold.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* - (- X) == X.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand is negatable,
	 preferring the second.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      /* Warn unless the divisor is a constant other than 1
		 (then the transformation can't create INT_MIN / -1).  */
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only valid when the shift count is precision - 1, i.e. the
	     shift isolates the sign bit; flip signedness and reshift.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  /* Remember the original type/location before stripping sign-preserving
     conversions, so the result can be converted back.  */
  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    /* No simplification: fall back to an explicit NEGATE_EXPR node.  */
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p tracks whether the second operand was subtracted;
	 the neg_*_p flags record which extracted part needs negating.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating all of IN: swap the literal between LITP and
	 MINUS_LITP instead of building a NEGATE_EXPR on it.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  /* X + (-Y) -> X - Y, in either operand order.  */
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  /* X - 0 -> X.  */
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      /* Build without folding to avoid the recursion noted above.  */
      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    /* For shifts and rotates the second operand is a count, so its
       type need not match the first operand's type.  */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  /* Otherwise require matching signedness, precision and mode.  */
  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
975 /* Combine two integer constants ARG1 and ARG2 under operation CODE
976 to produce a new constant. Return NULL_TREE if we don't know how
977 to evaluate CODE at compile-time. */
979 static tree
980 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
981 int overflowable)
983 double_int op1, op2, res, tmp;
984 tree t;
985 tree type = TREE_TYPE (arg1);
986 bool uns = TYPE_UNSIGNED (type);
987 bool overflow = false;
989 op1 = tree_to_double_int (arg1);
990 op2 = tree_to_double_int (arg2);
992 switch (code)
994 case BIT_IOR_EXPR:
995 res = op1 | op2;
996 break;
998 case BIT_XOR_EXPR:
999 res = op1 ^ op2;
1000 break;
1002 case BIT_AND_EXPR:
1003 res = op1 & op2;
1004 break;
1006 case RSHIFT_EXPR:
1007 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1008 break;
1010 case LSHIFT_EXPR:
1011 /* It's unclear from the C standard whether shifts can overflow.
1012 The following code ignores overflow; perhaps a C standard
1013 interpretation ruling is needed. */
1014 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1015 break;
1017 case RROTATE_EXPR:
1018 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1019 break;
1021 case LROTATE_EXPR:
1022 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1023 break;
1025 case PLUS_EXPR:
1026 res = op1.add_with_sign (op2, false, &overflow);
1027 break;
1029 case MINUS_EXPR:
1030 res = op1.sub_with_overflow (op2, &overflow);
1031 break;
1033 case MULT_EXPR:
1034 res = op1.mul_with_sign (op2, false, &overflow);
1035 break;
1037 case MULT_HIGHPART_EXPR:
1038 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1040 bool dummy_overflow;
1041 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1042 return NULL_TREE;
1043 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1045 else
1047 bool dummy_overflow;
1048 /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
1049 is performed in twice the precision of arguments. */
1050 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1051 res = tmp.rshift (TYPE_PRECISION (type),
1052 2 * TYPE_PRECISION (type), !uns);
1054 break;
1056 case TRUNC_DIV_EXPR:
1057 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1058 case EXACT_DIV_EXPR:
1059 /* This is a shortcut for a common special case. */
1060 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1061 && !TREE_OVERFLOW (arg1)
1062 && !TREE_OVERFLOW (arg2)
1063 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1065 if (code == CEIL_DIV_EXPR)
1066 op1.low += op2.low - 1;
1068 res.low = op1.low / op2.low, res.high = 0;
1069 break;
1072 /* ... fall through ... */
1074 case ROUND_DIV_EXPR:
1075 if (op2.is_zero ())
1076 return NULL_TREE;
1077 if (op2.is_one ())
1079 res = op1;
1080 break;
1082 if (op1 == op2 && !op1.is_zero ())
1084 res = double_int_one;
1085 break;
1087 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1088 break;
1090 case TRUNC_MOD_EXPR:
1091 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1092 /* This is a shortcut for a common special case. */
1093 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1094 && !TREE_OVERFLOW (arg1)
1095 && !TREE_OVERFLOW (arg2)
1096 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1098 if (code == CEIL_MOD_EXPR)
1099 op1.low += op2.low - 1;
1100 res.low = op1.low % op2.low, res.high = 0;
1101 break;
1104 /* ... fall through ... */
1106 case ROUND_MOD_EXPR:
1107 if (op2.is_zero ())
1108 return NULL_TREE;
1110 /* Check for the case the case of INT_MIN % -1 and return
1111 overflow and result = 0. The TImode case is handled properly
1112 in double-int. */
1113 if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
1114 && !uns
1115 && op2.is_minus_one ()
1116 && op1.high == (HOST_WIDE_INT) -1
1117 && (HOST_WIDE_INT) op1.low
1118 == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
1120 overflow = 1;
1121 res = double_int_zero;
1123 else
1124 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1125 break;
1127 case MIN_EXPR:
1128 res = op1.min (op2, uns);
1129 break;
1131 case MAX_EXPR:
1132 res = op1.max (op2, uns);
1133 break;
1135 default:
1136 return NULL_TREE;
1139 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1140 (!uns && overflow)
1141 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1143 return t;
1146 tree
1147 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1149 return int_const_binop_1 (code, arg1, arg2, 1);
1152 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1153 constant. We assume ARG1 and ARG2 have the same data type, or at least
1154 are the same kind of constant and the same machine mode. Return zero if
1155 combining the constants is not allowed in the current operating mode. */
1157 static tree
1158 const_binop (enum tree_code code, tree arg1, tree arg2)
1160 /* Sanity check for the recursive cases. */
1161 if (!arg1 || !arg2)
1162 return NULL_TREE;
1164 STRIP_NOPS (arg1);
1165 STRIP_NOPS (arg2);
1167 if (TREE_CODE (arg1) == INTEGER_CST)
1168 return int_const_binop (code, arg1, arg2);
1170 if (TREE_CODE (arg1) == REAL_CST)
1172 enum machine_mode mode;
1173 REAL_VALUE_TYPE d1;
1174 REAL_VALUE_TYPE d2;
1175 REAL_VALUE_TYPE value;
1176 REAL_VALUE_TYPE result;
1177 bool inexact;
1178 tree t, type;
1180 /* The following codes are handled by real_arithmetic. */
1181 switch (code)
1183 case PLUS_EXPR:
1184 case MINUS_EXPR:
1185 case MULT_EXPR:
1186 case RDIV_EXPR:
1187 case MIN_EXPR:
1188 case MAX_EXPR:
1189 break;
1191 default:
1192 return NULL_TREE;
1195 d1 = TREE_REAL_CST (arg1);
1196 d2 = TREE_REAL_CST (arg2);
1198 type = TREE_TYPE (arg1);
1199 mode = TYPE_MODE (type);
1201 /* Don't perform operation if we honor signaling NaNs and
1202 either operand is a NaN. */
1203 if (HONOR_SNANS (mode)
1204 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1205 return NULL_TREE;
1207 /* Don't perform operation if it would raise a division
1208 by zero exception. */
1209 if (code == RDIV_EXPR
1210 && REAL_VALUES_EQUAL (d2, dconst0)
1211 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1212 return NULL_TREE;
1214 /* If either operand is a NaN, just return it. Otherwise, set up
1215 for floating-point trap; we return an overflow. */
1216 if (REAL_VALUE_ISNAN (d1))
1217 return arg1;
1218 else if (REAL_VALUE_ISNAN (d2))
1219 return arg2;
1221 inexact = real_arithmetic (&value, code, &d1, &d2);
1222 real_convert (&result, mode, &value);
1224 /* Don't constant fold this floating point operation if
1225 the result has overflowed and flag_trapping_math. */
1226 if (flag_trapping_math
1227 && MODE_HAS_INFINITIES (mode)
1228 && REAL_VALUE_ISINF (result)
1229 && !REAL_VALUE_ISINF (d1)
1230 && !REAL_VALUE_ISINF (d2))
1231 return NULL_TREE;
1233 /* Don't constant fold this floating point operation if the
1234 result may dependent upon the run-time rounding mode and
1235 flag_rounding_math is set, or if GCC's software emulation
1236 is unable to accurately represent the result. */
1237 if ((flag_rounding_math
1238 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1239 && (inexact || !real_identical (&result, &value)))
1240 return NULL_TREE;
1242 t = build_real (type, result);
1244 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1245 return t;
1248 if (TREE_CODE (arg1) == FIXED_CST)
1250 FIXED_VALUE_TYPE f1;
1251 FIXED_VALUE_TYPE f2;
1252 FIXED_VALUE_TYPE result;
1253 tree t, type;
1254 int sat_p;
1255 bool overflow_p;
1257 /* The following codes are handled by fixed_arithmetic. */
1258 switch (code)
1260 case PLUS_EXPR:
1261 case MINUS_EXPR:
1262 case MULT_EXPR:
1263 case TRUNC_DIV_EXPR:
1264 f2 = TREE_FIXED_CST (arg2);
1265 break;
1267 case LSHIFT_EXPR:
1268 case RSHIFT_EXPR:
1269 f2.data.high = TREE_INT_CST_HIGH (arg2);
1270 f2.data.low = TREE_INT_CST_LOW (arg2);
1271 f2.mode = SImode;
1272 break;
1274 default:
1275 return NULL_TREE;
1278 f1 = TREE_FIXED_CST (arg1);
1279 type = TREE_TYPE (arg1);
1280 sat_p = TYPE_SATURATING (type);
1281 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1282 t = build_fixed (type, result);
1283 /* Propagate overflow flags. */
1284 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1285 TREE_OVERFLOW (t) = 1;
1286 return t;
1289 if (TREE_CODE (arg1) == COMPLEX_CST)
1291 tree type = TREE_TYPE (arg1);
1292 tree r1 = TREE_REALPART (arg1);
1293 tree i1 = TREE_IMAGPART (arg1);
1294 tree r2 = TREE_REALPART (arg2);
1295 tree i2 = TREE_IMAGPART (arg2);
1296 tree real, imag;
1298 switch (code)
1300 case PLUS_EXPR:
1301 case MINUS_EXPR:
1302 real = const_binop (code, r1, r2);
1303 imag = const_binop (code, i1, i2);
1304 break;
1306 case MULT_EXPR:
1307 if (COMPLEX_FLOAT_TYPE_P (type))
1308 return do_mpc_arg2 (arg1, arg2, type,
1309 /* do_nonfinite= */ folding_initializer,
1310 mpc_mul);
1312 real = const_binop (MINUS_EXPR,
1313 const_binop (MULT_EXPR, r1, r2),
1314 const_binop (MULT_EXPR, i1, i2));
1315 imag = const_binop (PLUS_EXPR,
1316 const_binop (MULT_EXPR, r1, i2),
1317 const_binop (MULT_EXPR, i1, r2));
1318 break;
1320 case RDIV_EXPR:
1321 if (COMPLEX_FLOAT_TYPE_P (type))
1322 return do_mpc_arg2 (arg1, arg2, type,
1323 /* do_nonfinite= */ folding_initializer,
1324 mpc_div);
1325 /* Fallthru ... */
1326 case TRUNC_DIV_EXPR:
1327 case CEIL_DIV_EXPR:
1328 case FLOOR_DIV_EXPR:
1329 case ROUND_DIV_EXPR:
1330 if (flag_complex_method == 0)
1332 /* Keep this algorithm in sync with
1333 tree-complex.c:expand_complex_div_straight().
1335 Expand complex division to scalars, straightforward algorithm.
1336 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1337 t = br*br + bi*bi
1339 tree magsquared
1340 = const_binop (PLUS_EXPR,
1341 const_binop (MULT_EXPR, r2, r2),
1342 const_binop (MULT_EXPR, i2, i2));
1343 tree t1
1344 = const_binop (PLUS_EXPR,
1345 const_binop (MULT_EXPR, r1, r2),
1346 const_binop (MULT_EXPR, i1, i2));
1347 tree t2
1348 = const_binop (MINUS_EXPR,
1349 const_binop (MULT_EXPR, i1, r2),
1350 const_binop (MULT_EXPR, r1, i2));
1352 real = const_binop (code, t1, magsquared);
1353 imag = const_binop (code, t2, magsquared);
1355 else
1357 /* Keep this algorithm in sync with
1358 tree-complex.c:expand_complex_div_wide().
1360 Expand complex division to scalars, modified algorithm to minimize
1361 overflow with wide input ranges. */
1362 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1363 fold_abs_const (r2, TREE_TYPE (type)),
1364 fold_abs_const (i2, TREE_TYPE (type)));
1366 if (integer_nonzerop (compare))
1368 /* In the TRUE branch, we compute
1369 ratio = br/bi;
1370 div = (br * ratio) + bi;
1371 tr = (ar * ratio) + ai;
1372 ti = (ai * ratio) - ar;
1373 tr = tr / div;
1374 ti = ti / div; */
1375 tree ratio = const_binop (code, r2, i2);
1376 tree div = const_binop (PLUS_EXPR, i2,
1377 const_binop (MULT_EXPR, r2, ratio));
1378 real = const_binop (MULT_EXPR, r1, ratio);
1379 real = const_binop (PLUS_EXPR, real, i1);
1380 real = const_binop (code, real, div);
1382 imag = const_binop (MULT_EXPR, i1, ratio);
1383 imag = const_binop (MINUS_EXPR, imag, r1);
1384 imag = const_binop (code, imag, div);
1386 else
1388 /* In the FALSE branch, we compute
1389 ratio = d/c;
1390 divisor = (d * ratio) + c;
1391 tr = (b * ratio) + a;
1392 ti = b - (a * ratio);
1393 tr = tr / div;
1394 ti = ti / div; */
1395 tree ratio = const_binop (code, i2, r2);
1396 tree div = const_binop (PLUS_EXPR, r2,
1397 const_binop (MULT_EXPR, i2, ratio));
1399 real = const_binop (MULT_EXPR, i1, ratio);
1400 real = const_binop (PLUS_EXPR, real, r1);
1401 real = const_binop (code, real, div);
1403 imag = const_binop (MULT_EXPR, r1, ratio);
1404 imag = const_binop (MINUS_EXPR, i1, imag);
1405 imag = const_binop (code, imag, div);
1408 break;
1410 default:
1411 return NULL_TREE;
1414 if (real && imag)
1415 return build_complex (type, real, imag);
1418 if (TREE_CODE (arg1) == VECTOR_CST
1419 && TREE_CODE (arg2) == VECTOR_CST)
1421 tree type = TREE_TYPE (arg1);
1422 int count = TYPE_VECTOR_SUBPARTS (type), i;
1423 tree *elts = XALLOCAVEC (tree, count);
1425 for (i = 0; i < count; i++)
1427 tree elem1 = VECTOR_CST_ELT (arg1, i);
1428 tree elem2 = VECTOR_CST_ELT (arg2, i);
1430 elts[i] = const_binop (code, elem1, elem2);
1432 /* It is possible that const_binop cannot handle the given
1433 code and return NULL_TREE */
1434 if (elts[i] == NULL_TREE)
1435 return NULL_TREE;
1438 return build_vector (type, elts);
1441 /* Shifts allow a scalar offset for a vector. */
1442 if (TREE_CODE (arg1) == VECTOR_CST
1443 && TREE_CODE (arg2) == INTEGER_CST)
1445 tree type = TREE_TYPE (arg1);
1446 int count = TYPE_VECTOR_SUBPARTS (type), i;
1447 tree *elts = XALLOCAVEC (tree, count);
1449 if (code == VEC_LSHIFT_EXPR
1450 || code == VEC_RSHIFT_EXPR)
1452 if (!tree_fits_uhwi_p (arg2))
1453 return NULL_TREE;
1455 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1456 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1457 unsigned HOST_WIDE_INT innerc
1458 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1459 if (shiftc >= outerc || (shiftc % innerc) != 0)
1460 return NULL_TREE;
1461 int offset = shiftc / innerc;
1462 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1463 For reductions, compiler emits VEC_RSHIFT_EXPR always,
1464 for !BYTES_BIG_ENDIAN picks first vector element, but
1465 for BYTES_BIG_ENDIAN last element from the vector. */
1466 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1467 offset = -offset;
1468 tree zero = build_zero_cst (TREE_TYPE (type));
1469 for (i = 0; i < count; i++)
1471 if (i + offset < 0 || i + offset >= count)
1472 elts[i] = zero;
1473 else
1474 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1477 else
1478 for (i = 0; i < count; i++)
1480 tree elem1 = VECTOR_CST_ELT (arg1, i);
1482 elts[i] = const_binop (code, elem1, arg2);
1484 /* It is possible that const_binop cannot handle the given
1485 code and return NULL_TREE */
1486 if (elts[i] == NULL_TREE)
1487 return NULL_TREE;
1490 return build_vector (type, elts);
1492 return NULL_TREE;
1495 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1496 indicates which particular sizetype to create. */
1498 tree
1499 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1501 return build_int_cst (sizetype_tab[(int) kind], number);
1504 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1505 is a tree code. The type of the result is taken from the operands.
1506 Both must be equivalent integer types, ala int_binop_types_match_p.
1507 If the operands are constant, so is the result. */
1509 tree
1510 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1512 tree type = TREE_TYPE (arg0);
1514 if (arg0 == error_mark_node || arg1 == error_mark_node)
1515 return error_mark_node;
1517 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1518 TREE_TYPE (arg1)));
1520 /* Handle the special case of two integer constants faster. */
1521 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1523 /* And some specific cases even faster than that. */
1524 if (code == PLUS_EXPR)
1526 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1527 return arg1;
1528 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1529 return arg0;
1531 else if (code == MINUS_EXPR)
1533 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1534 return arg0;
1536 else if (code == MULT_EXPR)
1538 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1539 return arg1;
1542 /* Handle general case of two integer constants. For sizetype
1543 constant calculations we always want to know about overflow,
1544 even in the unsigned case. */
1545 return int_const_binop_1 (code, arg0, arg1, -1);
1548 return fold_build2_loc (loc, code, type, arg0, arg1);
1551 /* Given two values, either both of sizetype or both of bitsizetype,
1552 compute the difference between the two values. Return the value
1553 in signed type corresponding to the type of the operands. */
1555 tree
1556 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1558 tree type = TREE_TYPE (arg0);
1559 tree ctype;
1561 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1562 TREE_TYPE (arg1)));
1564 /* If the type is already signed, just do the simple thing. */
1565 if (!TYPE_UNSIGNED (type))
1566 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1568 if (type == sizetype)
1569 ctype = ssizetype;
1570 else if (type == bitsizetype)
1571 ctype = sbitsizetype;
1572 else
1573 ctype = signed_type_for (type);
1575 /* If either operand is not a constant, do the conversions to the signed
1576 type and subtract. The hardware will do the right thing with any
1577 overflow in the subtraction. */
1578 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1579 return size_binop_loc (loc, MINUS_EXPR,
1580 fold_convert_loc (loc, ctype, arg0),
1581 fold_convert_loc (loc, ctype, arg1));
1583 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1584 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1585 overflow) and negate (which can't either). Special-case a result
1586 of zero while we're here. */
1587 if (tree_int_cst_equal (arg0, arg1))
1588 return build_int_cst (ctype, 0);
1589 else if (tree_int_cst_lt (arg1, arg0))
1590 return fold_convert_loc (loc, ctype,
1591 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1592 else
1593 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1594 fold_convert_loc (loc, ctype,
1595 size_binop_loc (loc,
1596 MINUS_EXPR,
1597 arg1, arg0)));
1600 /* A subroutine of fold_convert_const handling conversions of an
1601 INTEGER_CST to another integer type. */
1603 static tree
1604 fold_convert_const_int_from_int (tree type, const_tree arg1)
1606 tree t;
1608 /* Given an integer constant, make new constant with new type,
1609 appropriately sign-extended or truncated. */
1610 t = force_fit_type_double (type, tree_to_double_int (arg1),
1611 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1612 (TREE_INT_CST_HIGH (arg1) < 0
1613 && (TYPE_UNSIGNED (type)
1614 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1615 | TREE_OVERFLOW (arg1));
1617 return t;
1620 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1621 to an integer type. */
1623 static tree
1624 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1626 int overflow = 0;
1627 tree t;
1629 /* The following code implements the floating point to integer
1630 conversion rules required by the Java Language Specification,
1631 that IEEE NaNs are mapped to zero and values that overflow
1632 the target precision saturate, i.e. values greater than
1633 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1634 are mapped to INT_MIN. These semantics are allowed by the
1635 C and C++ standards that simply state that the behavior of
1636 FP-to-integer conversion is unspecified upon overflow. */
1638 double_int val;
1639 REAL_VALUE_TYPE r;
1640 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1642 switch (code)
1644 case FIX_TRUNC_EXPR:
1645 real_trunc (&r, VOIDmode, &x);
1646 break;
1648 default:
1649 gcc_unreachable ();
1652 /* If R is NaN, return zero and show we have an overflow. */
1653 if (REAL_VALUE_ISNAN (r))
1655 overflow = 1;
1656 val = double_int_zero;
1659 /* See if R is less than the lower bound or greater than the
1660 upper bound. */
1662 if (! overflow)
1664 tree lt = TYPE_MIN_VALUE (type);
1665 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1666 if (REAL_VALUES_LESS (r, l))
1668 overflow = 1;
1669 val = tree_to_double_int (lt);
1673 if (! overflow)
1675 tree ut = TYPE_MAX_VALUE (type);
1676 if (ut)
1678 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1679 if (REAL_VALUES_LESS (u, r))
1681 overflow = 1;
1682 val = tree_to_double_int (ut);
1687 if (! overflow)
1688 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1690 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1691 return t;
1694 /* A subroutine of fold_convert_const handling conversions of a
1695 FIXED_CST to an integer type. */
1697 static tree
1698 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1700 tree t;
1701 double_int temp, temp_trunc;
1702 unsigned int mode;
1704 /* Right shift FIXED_CST to temp by fbit. */
1705 temp = TREE_FIXED_CST (arg1).data;
1706 mode = TREE_FIXED_CST (arg1).mode;
1707 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1709 temp = temp.rshift (GET_MODE_FBIT (mode),
1710 HOST_BITS_PER_DOUBLE_INT,
1711 SIGNED_FIXED_POINT_MODE_P (mode));
1713 /* Left shift temp to temp_trunc by fbit. */
1714 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1715 HOST_BITS_PER_DOUBLE_INT,
1716 SIGNED_FIXED_POINT_MODE_P (mode));
1718 else
1720 temp = double_int_zero;
1721 temp_trunc = double_int_zero;
1724 /* If FIXED_CST is negative, we need to round the value toward 0.
1725 By checking if the fractional bits are not zero to add 1 to temp. */
1726 if (SIGNED_FIXED_POINT_MODE_P (mode)
1727 && temp_trunc.is_negative ()
1728 && TREE_FIXED_CST (arg1).data != temp_trunc)
1729 temp += double_int_one;
1731 /* Given a fixed-point constant, make new constant with new type,
1732 appropriately sign-extended or truncated. */
1733 t = force_fit_type_double (type, temp, -1,
1734 (temp.is_negative ()
1735 && (TYPE_UNSIGNED (type)
1736 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1737 | TREE_OVERFLOW (arg1));
1739 return t;
1742 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1743 to another floating point type. */
1745 static tree
1746 fold_convert_const_real_from_real (tree type, const_tree arg1)
1748 REAL_VALUE_TYPE value;
1749 tree t;
1751 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1752 t = build_real (type, value);
1754 /* If converting an infinity or NAN to a representation that doesn't
1755 have one, set the overflow bit so that we can produce some kind of
1756 error message at the appropriate point if necessary. It's not the
1757 most user-friendly message, but it's better than nothing. */
1758 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1759 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1760 TREE_OVERFLOW (t) = 1;
1761 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1762 && !MODE_HAS_NANS (TYPE_MODE (type)))
1763 TREE_OVERFLOW (t) = 1;
1764 /* Regular overflow, conversion produced an infinity in a mode that
1765 can't represent them. */
1766 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1767 && REAL_VALUE_ISINF (value)
1768 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1769 TREE_OVERFLOW (t) = 1;
1770 else
1771 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1772 return t;
1775 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1776 to a floating point type. */
1778 static tree
1779 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1781 REAL_VALUE_TYPE value;
1782 tree t;
1784 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1785 t = build_real (type, value);
1787 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1788 return t;
1791 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1792 to another fixed-point type. */
1794 static tree
1795 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1797 FIXED_VALUE_TYPE value;
1798 tree t;
1799 bool overflow_p;
1801 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1802 TYPE_SATURATING (type));
1803 t = build_fixed (type, value);
1805 /* Propagate overflow flags. */
1806 if (overflow_p | TREE_OVERFLOW (arg1))
1807 TREE_OVERFLOW (t) = 1;
1808 return t;
1811 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1812 to a fixed-point type. */
1814 static tree
1815 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1817 FIXED_VALUE_TYPE value;
1818 tree t;
1819 bool overflow_p;
1821 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1822 TREE_INT_CST (arg1),
1823 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1824 TYPE_SATURATING (type));
1825 t = build_fixed (type, value);
1827 /* Propagate overflow flags. */
1828 if (overflow_p | TREE_OVERFLOW (arg1))
1829 TREE_OVERFLOW (t) = 1;
1830 return t;
1833 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1834 to a fixed-point type. */
1836 static tree
1837 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1839 FIXED_VALUE_TYPE value;
1840 tree t;
1841 bool overflow_p;
1843 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1844 &TREE_REAL_CST (arg1),
1845 TYPE_SATURATING (type));
1846 t = build_fixed (type, value);
1848 /* Propagate overflow flags. */
1849 if (overflow_p | TREE_OVERFLOW (arg1))
1850 TREE_OVERFLOW (t) = 1;
1851 return t;
1854 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1855 type TYPE. If no simplification can be done return NULL_TREE. */
1857 static tree
1858 fold_convert_const (enum tree_code code, tree type, tree arg1)
1860 if (TREE_TYPE (arg1) == type)
1861 return arg1;
1863 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1864 || TREE_CODE (type) == OFFSET_TYPE)
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return fold_convert_const_int_from_int (type, arg1);
1868 else if (TREE_CODE (arg1) == REAL_CST)
1869 return fold_convert_const_int_from_real (code, type, arg1);
1870 else if (TREE_CODE (arg1) == FIXED_CST)
1871 return fold_convert_const_int_from_fixed (type, arg1);
1873 else if (TREE_CODE (type) == REAL_TYPE)
1875 if (TREE_CODE (arg1) == INTEGER_CST)
1876 return build_real_from_int_cst (type, arg1);
1877 else if (TREE_CODE (arg1) == REAL_CST)
1878 return fold_convert_const_real_from_real (type, arg1);
1879 else if (TREE_CODE (arg1) == FIXED_CST)
1880 return fold_convert_const_real_from_fixed (type, arg1);
1882 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1884 if (TREE_CODE (arg1) == FIXED_CST)
1885 return fold_convert_const_fixed_from_fixed (type, arg1);
1886 else if (TREE_CODE (arg1) == INTEGER_CST)
1887 return fold_convert_const_fixed_from_int (type, arg1);
1888 else if (TREE_CODE (arg1) == REAL_CST)
1889 return fold_convert_const_fixed_from_real (type, arg1);
1891 return NULL_TREE;
1894 /* Construct a vector of zero elements of vector type TYPE. */
1896 static tree
1897 build_zero_vector (tree type)
1899 tree t;
1901 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1902 return build_vector_from_val (type, t);
1905 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1907 bool
1908 fold_convertible_p (const_tree type, const_tree arg)
1910 tree orig = TREE_TYPE (arg);
1912 if (type == orig)
1913 return true;
1915 if (TREE_CODE (arg) == ERROR_MARK
1916 || TREE_CODE (type) == ERROR_MARK
1917 || TREE_CODE (orig) == ERROR_MARK)
1918 return false;
1920 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1921 return true;
1923 switch (TREE_CODE (type))
1925 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1926 case POINTER_TYPE: case REFERENCE_TYPE:
1927 case OFFSET_TYPE:
1928 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1929 || TREE_CODE (orig) == OFFSET_TYPE)
1930 return true;
1931 return (TREE_CODE (orig) == VECTOR_TYPE
1932 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1934 case REAL_TYPE:
1935 case FIXED_POINT_TYPE:
1936 case COMPLEX_TYPE:
1937 case VECTOR_TYPE:
1938 case VOID_TYPE:
1939 return TREE_CODE (type) == TREE_CODE (orig);
1941 default:
1942 return false;
1946 /* Convert expression ARG to type TYPE. Used by the middle-end for
1947 simple conversions in preference to calling the front-end's convert. */
1949 tree
1950 fold_convert_loc (location_t loc, tree type, tree arg)
1952 tree orig = TREE_TYPE (arg);
1953 tree tem;
1955 if (type == orig)
1956 return arg;
1958 if (TREE_CODE (arg) == ERROR_MARK
1959 || TREE_CODE (type) == ERROR_MARK
1960 || TREE_CODE (orig) == ERROR_MARK)
1961 return error_mark_node;
1963 switch (TREE_CODE (type))
1965 case POINTER_TYPE:
1966 case REFERENCE_TYPE:
1967 /* Handle conversions between pointers to different address spaces. */
1968 if (POINTER_TYPE_P (orig)
1969 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1970 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1971 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1972 /* fall through */
1974 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1975 case OFFSET_TYPE:
1976 if (TREE_CODE (arg) == INTEGER_CST)
1978 tem = fold_convert_const (NOP_EXPR, type, arg);
1979 if (tem != NULL_TREE)
1980 return tem;
1982 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1983 || TREE_CODE (orig) == OFFSET_TYPE)
1984 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1985 if (TREE_CODE (orig) == COMPLEX_TYPE)
1986 return fold_convert_loc (loc, type,
1987 fold_build1_loc (loc, REALPART_EXPR,
1988 TREE_TYPE (orig), arg));
1989 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1990 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1991 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1993 case REAL_TYPE:
1994 if (TREE_CODE (arg) == INTEGER_CST)
1996 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1997 if (tem != NULL_TREE)
1998 return tem;
2000 else if (TREE_CODE (arg) == REAL_CST)
2002 tem = fold_convert_const (NOP_EXPR, type, arg);
2003 if (tem != NULL_TREE)
2004 return tem;
2006 else if (TREE_CODE (arg) == FIXED_CST)
2008 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2013 switch (TREE_CODE (orig))
2015 case INTEGER_TYPE:
2016 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2017 case POINTER_TYPE: case REFERENCE_TYPE:
2018 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2020 case REAL_TYPE:
2021 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2023 case FIXED_POINT_TYPE:
2024 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2026 case COMPLEX_TYPE:
2027 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2028 return fold_convert_loc (loc, type, tem);
2030 default:
2031 gcc_unreachable ();
2034 case FIXED_POINT_TYPE:
2035 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2036 || TREE_CODE (arg) == REAL_CST)
2038 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2039 if (tem != NULL_TREE)
2040 goto fold_convert_exit;
2043 switch (TREE_CODE (orig))
2045 case FIXED_POINT_TYPE:
2046 case INTEGER_TYPE:
2047 case ENUMERAL_TYPE:
2048 case BOOLEAN_TYPE:
2049 case REAL_TYPE:
2050 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2052 case COMPLEX_TYPE:
2053 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2054 return fold_convert_loc (loc, type, tem);
2056 default:
2057 gcc_unreachable ();
2060 case COMPLEX_TYPE:
2061 switch (TREE_CODE (orig))
2063 case INTEGER_TYPE:
2064 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2065 case POINTER_TYPE: case REFERENCE_TYPE:
2066 case REAL_TYPE:
2067 case FIXED_POINT_TYPE:
2068 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2069 fold_convert_loc (loc, TREE_TYPE (type), arg),
2070 fold_convert_loc (loc, TREE_TYPE (type),
2071 integer_zero_node));
2072 case COMPLEX_TYPE:
2074 tree rpart, ipart;
2076 if (TREE_CODE (arg) == COMPLEX_EXPR)
2078 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2079 TREE_OPERAND (arg, 0));
2080 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2081 TREE_OPERAND (arg, 1));
2082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2089 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2090 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2093 default:
2094 gcc_unreachable ();
2097 case VECTOR_TYPE:
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2105 case VOID_TYPE:
2106 tem = fold_ignored_result (arg);
2107 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2109 default:
2110 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2111 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2112 gcc_unreachable ();
2114 fold_convert_exit:
2115 protected_set_expr_location_unshare (tem, loc);
2116 return tem;
2119 /* Return false if expr can be assumed not to be an lvalue, true
2120 otherwise. */
2122 static bool
2123 maybe_lvalue_p (const_tree x)
2125 /* We only need to wrap lvalue tree codes. */
2126 switch (TREE_CODE (x))
2128 case VAR_DECL:
2129 case PARM_DECL:
2130 case RESULT_DECL:
2131 case LABEL_DECL:
2132 case FUNCTION_DECL:
2133 case SSA_NAME:
2135 case COMPONENT_REF:
2136 case MEM_REF:
2137 case INDIRECT_REF:
2138 case ARRAY_REF:
2139 case ARRAY_RANGE_REF:
2140 case BIT_FIELD_REF:
2141 case OBJ_TYPE_REF:
2143 case REALPART_EXPR:
2144 case IMAGPART_EXPR:
2145 case PREINCREMENT_EXPR:
2146 case PREDECREMENT_EXPR:
2147 case SAVE_EXPR:
2148 case TRY_CATCH_EXPR:
2149 case WITH_CLEANUP_EXPR:
2150 case COMPOUND_EXPR:
2151 case MODIFY_EXPR:
2152 case TARGET_EXPR:
2153 case COND_EXPR:
2154 case BIND_EXPR:
2155 break;
2157 default:
2158 /* Assume the worst for front-end tree codes. */
2159 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2160 break;
2161 return false;
2164 return true;
2167 /* Return an expr equal to X but certainly not valid as an lvalue. */
2169 tree
2170 non_lvalue_loc (location_t loc, tree x)
2172 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2173 us. */
2174 if (in_gimple_form)
2175 return x;
2177 if (! maybe_lvalue_p (x))
2178 return x;
2179 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2183 Zero means allow extended lvalues. */
2185 int pedantic_lvalues;
2187 /* When pedantic, return an expr equal to X but certainly not valid as a
2188 pedantic lvalue. Otherwise, return X. */
2190 static tree
2191 pedantic_non_lvalue_loc (location_t loc, tree x)
2193 if (pedantic_lvalues)
2194 return non_lvalue_loc (loc, x);
2196 return protected_set_expr_location_unshare (x, loc);
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With NaNs and -ftrapping-math, inverting an ordered comparison
     yields an unordered one (and vice versa), which changes whether the
     operation traps on NaN operands.  Only EQ/NE/ORDERED/UNORDERED are
     trap-free both ways, so refuse everything else.  */
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* For the ordered inequalities the true inverse must also hold when
       an operand is NaN, hence the UN* codes when NaNs are honored.  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2246 /* Similar, but return the comparison that results if the operands are
2247 swapped. This is safe for floating-point. */
2249 enum tree_code
2250 swap_tree_comparison (enum tree_code code)
2252 switch (code)
2254 case EQ_EXPR:
2255 case NE_EXPR:
2256 case ORDERED_EXPR:
2257 case UNORDERED_EXPR:
2258 case LTGT_EXPR:
2259 case UNEQ_EXPR:
2260 return code;
2261 case GT_EXPR:
2262 return LT_EXPR;
2263 case GE_EXPR:
2264 return LE_EXPR;
2265 case LT_EXPR:
2266 return GT_EXPR;
2267 case LE_EXPR:
2268 return GE_EXPR;
2269 case UNGT_EXPR:
2270 return UNLT_EXPR;
2271 case UNGE_EXPR:
2272 return UNLE_EXPR;
2273 case UNLT_EXPR:
2274 return UNGT_EXPR;
2275 case UNLE_EXPR:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2283 /* Convert a comparison tree code from an enum tree_code representation
2284 into a compcode bit-based encoding. This function is the inverse of
2285 compcode_to_comparison. */
2287 static enum comparison_code
2288 comparison_to_compcode (enum tree_code code)
2290 switch (code)
2292 case LT_EXPR:
2293 return COMPCODE_LT;
2294 case EQ_EXPR:
2295 return COMPCODE_EQ;
2296 case LE_EXPR:
2297 return COMPCODE_LE;
2298 case GT_EXPR:
2299 return COMPCODE_GT;
2300 case NE_EXPR:
2301 return COMPCODE_NE;
2302 case GE_EXPR:
2303 return COMPCODE_GE;
2304 case ORDERED_EXPR:
2305 return COMPCODE_ORD;
2306 case UNORDERED_EXPR:
2307 return COMPCODE_UNORD;
2308 case UNLT_EXPR:
2309 return COMPCODE_UNLT;
2310 case UNEQ_EXPR:
2311 return COMPCODE_UNEQ;
2312 case UNLE_EXPR:
2313 return COMPCODE_UNLE;
2314 case UNGT_EXPR:
2315 return COMPCODE_UNGT;
2316 case LTGT_EXPR:
2317 return COMPCODE_LTGT;
2318 case UNGE_EXPR:
2319 return COMPCODE_UNGE;
2320 default:
2321 gcc_unreachable ();
2325 /* Convert a compcode bit-based encoding of a comparison operator back
2326 to GCC's enum tree_code representation. This function is the
2327 inverse of comparison_to_compcode. */
2329 static enum tree_code
2330 compcode_to_comparison (enum comparison_code code)
2332 switch (code)
2334 case COMPCODE_LT:
2335 return LT_EXPR;
2336 case COMPCODE_EQ:
2337 return EQ_EXPR;
2338 case COMPCODE_LE:
2339 return LE_EXPR;
2340 case COMPCODE_GT:
2341 return GT_EXPR;
2342 case COMPCODE_NE:
2343 return NE_EXPR;
2344 case COMPCODE_GE:
2345 return GE_EXPR;
2346 case COMPCODE_ORD:
2347 return ORDERED_EXPR;
2348 case COMPCODE_UNORD:
2349 return UNORDERED_EXPR;
2350 case COMPCODE_UNLT:
2351 return UNLT_EXPR;
2352 case COMPCODE_UNEQ:
2353 return UNEQ_EXPR;
2354 case COMPCODE_UNLE:
2355 return UNLE_EXPR;
2356 case COMPCODE_UNGT:
2357 return UNGT_EXPR;
2358 case COMPCODE_LTGT:
2359 return LTGT_EXPR;
2360 case COMPCODE_UNGE:
2361 return UNGE_EXPR;
2362 default:
2363 gcc_unreachable ();
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* comparison_code values are bit masks over the possible outcomes of
     comparing the same two operands, so the conjunction/disjunction of
     two predicates is just the AND/OR of their masks.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on NaN operands
	 unless it is an unordered variant, EQ, or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ALLOW_NULL is set, this routine will not crash on NULL operands,
   and two NULL operands are considered equal.  This flag is usually set
   in the context of frontend when ARG0 and/or ARG1 may be NULL mostly due
   to recursion on partially built expressions (e.g. a CAST_EXPR on a NULL
   tree.)  In this case, we certainly don't want the compiler to crash and
   it's OK to consider two NULL operands equal.  On the other hand, when
   called in the context of code generation and optimization, if NULL
   operands are not expected, silently ignoring them could be dangerous
   and might cause problems downstream that are hard to find/debug.  In that
   case, the flag should probably not be set.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is NULL, they must be both NULL to be equal.  We only do this
     check when OEP_ALLOW_NULL is set.  */
  if ((flags & OEP_ALLOW_NULL) && (!arg0 || !arg1))
    return arg0 == arg1;

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    {
      /* If the caller chooses to allow the comparison of operands without
	 types, we will continue the comparison only when both of them don't
	 have a type.  */
      if (!(flags & OEP_ALLOW_NO_TYPE) || TREE_TYPE (arg0) || TREE_TYPE (arg1))
	return 0;
    }

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TREE_TYPE (arg0)
      && (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
	  || POINTER_TYPE_P (TREE_TYPE (arg0))
	     != POINTER_TYPE_P (TREE_TYPE (arg1))))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (TREE_TYPE (arg0)
      && (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
	  && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	      != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (TREE_TYPE (arg0)
	    && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  /* Vectors compare equal element-wise.  */
	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    {
	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				    VECTOR_CST_ELT (arg1, i), flags))
		return 0;
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	/* Note: this replaces FLAGS for the recursion rather than
	   augmenting them; only constant addresses of the same object
	   are considered equal.  */
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TREE_TYPE (arg0)
	      && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		  != TYPE_UNSIGNED (TREE_TYPE (arg1))))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      /* The cases below clear OEP_CONSTANT_ADDRESS_OF before recursing
	 whenever the inner operands are no longer a whole address.  */
      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return (TREE_TYPE (arg0)
		  && (TYPE_SIZE (TREE_TYPE (arg0))
		      == TYPE_SIZE (TREE_TYPE (arg1))
		      || (TYPE_SIZE (TREE_TYPE (arg0))
			  && TYPE_SIZE (TREE_TYPE (arg1))
			  && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					      TYPE_SIZE (TREE_TYPE (arg1)),
					      flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && alias_ptr_types_compatible_p
		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
			TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    /* Only calls without observable global state dependence can
	       be considered equal: const calls always, pure calls only
	       when the caller asked for it via OEP_PURE_SAME.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening trick below only applies to integral types.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in *CVAL1 or *CVAL2 unless it matches one
	 already found; the empty statements deliberately fall out of the
	 chain when a match exists.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1, which may only match *CVAL1 or fill or
	 match *CVAL2.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters here; operand 0 was already
	     validated as side-effect material by the caller's analysis.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ??? */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
3117 /* Return a tree for the case when the result of an expression is RESULT
3118 converted to TYPE and OMITTED was previously an operand of the expression
3119 but is now not needed (e.g., we folded OMITTED * 0).
3121 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3122 the conversion of RESULT to TYPE. */
3124 tree
3125 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3127 tree t = fold_convert_loc (loc, type, result);
3129 /* If the resulting operand is an empty statement, just return the omitted
3130 statement casted to void. */
3131 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3132 return build1_loc (loc, NOP_EXPR, void_type_node,
3133 fold_ignored_result (omitted));
3135 if (TREE_SIDE_EFFECTS (omitted))
3136 return build2_loc (loc, COMPOUND_EXPR, type,
3137 fold_ignored_result (omitted), t);
3139 return non_lvalue_loc (loc, t);
3142 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3144 static tree
3145 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3146 tree omitted)
3148 tree t = fold_convert_loc (loc, type, result);
3150 /* If the resulting operand is an empty statement, just return the omitted
3151 statement casted to void. */
3152 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3153 return build1_loc (loc, NOP_EXPR, void_type_node,
3154 fold_ignored_result (omitted));
3156 if (TREE_SIDE_EFFECTS (omitted))
3157 return build2_loc (loc, COMPOUND_EXPR, type,
3158 fold_ignored_result (omitted), t);
3160 return pedantic_non_lvalue_loc (loc, t);
3163 /* Return a tree for the case when the result of an expression is RESULT
3164 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3165 of the expression but are now not needed.
3167 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3168 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3169 evaluated before OMITTED2. Otherwise, if neither has side effects,
3170 just do the conversion of RESULT to TYPE. */
3172 tree
3173 omit_two_operands_loc (location_t loc, tree type, tree result,
3174 tree omitted1, tree omitted2)
3176 tree t = fold_convert_loc (loc, type, result);
3178 if (TREE_SIDE_EFFECTS (omitted2))
3179 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3180 if (TREE_SIDE_EFFECTS (omitted1))
3181 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3183 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  Returns NULL_TREE when no
   simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With -ftrapping-math, inverting an ordered FP comparison would
	 produce an unordered one (or vice versa), which may trap on
	 different inputs; only equality-like codes are safe.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(A && B) --> !A || !B.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(A || B) --> !A && !B.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* Short-circuit De Morgan: !(A &&-if B) --> !A ||-if !B.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value operand (operand 1) is negated; operand 0 is
	 kept for its side effects.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(X & 1) becomes (X & 1) == 0; only valid for a single-low-bit
	 mask.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      /* Don't duplicate the SAVE_EXPR's contents; wrap it instead.  */
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
3329 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3330 assume that ARG is an operation that returns a truth value (0 or 1
3331 for scalars, 0 or -1 for vectors). Return the folded expression if
3332 folding is successful. Otherwise, return NULL_TREE. */
3334 static tree
3335 fold_invert_truthvalue (location_t loc, tree arg)
3337 tree type = TREE_TYPE (arg);
3338 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3339 ? BIT_NOT_EXPR
3340 : TRUTH_NOT_EXPR,
3341 type, arg);
3344 /* Return a simplified tree node for the truth-negation of ARG. This
3345 never alters ARG itself. We assume that ARG is an operation that
3346 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3348 tree
3349 invert_truthvalue_loc (location_t loc, tree arg)
3351 if (TREE_CODE (arg) == ERROR_MARK)
3352 return arg;
3354 tree type = TREE_TYPE (arg);
3355 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3356 ? BIT_NOT_EXPR
3357 : TRUTH_NOT_EXPR,
3358 type, arg);
3361 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3362 operands are another bit-wise operation with a common input. If so,
3363 distribute the bit operations to save an operation and possibly two if
3364 constants are involved. For example, convert
3365 (A | B) & (A | C) into A | (B & C)
3366 Further simplification will occur if B and C are constants.
3368 If this optimization cannot be done, 0 will be returned. */
3370 static tree
3371 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3372 tree arg0, tree arg1)
3374 tree common;
3375 tree left, right;
3377 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3378 || TREE_CODE (arg0) == code
3379 || (TREE_CODE (arg0) != BIT_AND_EXPR
3380 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3381 return 0;
3383 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3385 common = TREE_OPERAND (arg0, 0);
3386 left = TREE_OPERAND (arg0, 1);
3387 right = TREE_OPERAND (arg1, 1);
3389 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3391 common = TREE_OPERAND (arg0, 0);
3392 left = TREE_OPERAND (arg0, 1);
3393 right = TREE_OPERAND (arg1, 0);
3395 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3397 common = TREE_OPERAND (arg0, 1);
3398 left = TREE_OPERAND (arg0, 0);
3399 right = TREE_OPERAND (arg1, 1);
3401 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3403 common = TREE_OPERAND (arg0, 1);
3404 left = TREE_OPERAND (arg0, 0);
3405 right = TREE_OPERAND (arg1, 0);
3407 else
3408 return 0;
3410 common = fold_convert_loc (loc, type, common);
3411 left = fold_convert_loc (loc, type, left);
3412 right = fold_convert_loc (loc, type, right);
3413 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3414 fold_build2_loc (loc, code, type, left, right));
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.

   NOTE(review): despite the sentence above, the code below also accepts
   MULT_EXPR operands (a division already folded into a multiplication by
   a reciprocal); MUL0/MUL1 record which form each operand has — confirm
   against the callers.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  Requires both operands to be
     the same form (both divisions or both multiplications) with equal
     second operands.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  Only done when C1
     and C2 are compile-time real constants, so the combined factor is
     computed here with real_arithmetic.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* A division contributes the reciprocal of its constant.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
3458 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3459 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3461 static tree
3462 make_bit_field_ref (location_t loc, tree inner, tree type,
3463 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3465 tree result, bftype;
3467 if (bitpos == 0)
3469 tree size = TYPE_SIZE (TREE_TYPE (inner));
3470 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3471 || POINTER_TYPE_P (TREE_TYPE (inner)))
3472 && tree_fits_shwi_p (size)
3473 && tree_to_shwi (size) == bitsize)
3474 return fold_convert_loc (loc, type, inner);
3477 bftype = type;
3478 if (TYPE_PRECISION (bftype) != bitsize
3479 || TYPE_UNSIGNED (bftype) == !unsignedp)
3480 bftype = build_nonstandard_integer_type (bitsize, 0);
3482 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3483 size_int (bitsize), bitsize_int (bitpos));
3485 if (bftype != type)
3486 result = fold_convert_loc (loc, type, result);
3488 return result;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     if the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  /* Align the chunk start down to a multiple of its own size.  */
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones
     shifted left then right so only the LBITSIZE bits at LBITPOS
     within the chunk remain set.  */
  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit masking operation, remembering the mask.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* All-ones, shifted left then right so only the low *PBITSIZE bits
     remain set.  */
  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3747 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3748 bit positions. */
3750 static int
3751 all_ones_mask_p (const_tree mask, int size)
3753 tree type = TREE_TYPE (mask);
3754 unsigned int precision = TYPE_PRECISION (type);
3755 tree tmask;
3757 tmask = build_int_cst_type (signed_type_for (type), -1);
3759 return
3760 tree_int_cst_equal (mask,
3761 const_binop (RSHIFT_EXPR,
3762 const_binop (LSHIFT_EXPR, tmask,
3763 size_int (precision - size)),
3764 size_int (precision - size)));
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  /* Build HI:LO, the sign-bit-only value of the type, and
     MASK_HI:MASK_LO, a mask of the type's WIDTH significant bits, in
     the two-HOST_WIDE_INT representation used by INTEGER_CSTs here.  */
  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign bit lives in the high word.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* Sign bit lives in the low word.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
3823 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3824 to be evaluated unconditionally. */
3826 static int
3827 simple_operand_p (const_tree exp)
3829 /* Strip any conversions that don't change the machine mode. */
3830 STRIP_NOPS (exp);
3832 return (CONSTANT_CLASS_P (exp)
3833 || TREE_CODE (exp) == SSA_NAME
3834 || (DECL_P (exp)
3835 && ! TREE_ADDRESSABLE (exp)
3836 && ! TREE_THIS_VOLATILE (exp)
3837 && ! DECL_NONLOCAL (exp)
3838 /* Don't regard global variables as simple. They may be
3839 allocated in ways unknown to the compiler (shared memory,
3840 #pragma weak, etc). */
3841 && ! TREE_PUBLIC (exp)
3842 && ! DECL_EXTERNAL (exp)
3843 /* Weakrefs are not safe to be read, since they can be NULL.
3844 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3845 have DECL_WEAK flag set. */
3846 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3847 /* Loading a static variable is unduly expensive, but global
3848 registers aren't expensive. */
3849 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3852 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3853 to be evaluated unconditionally.
3854 I addition to simple_operand_p, we assume that comparisons, conversions,
3855 and logic-not operations are simple, if their operands are simple, too. */
3857 static bool
3858 simple_operand_p_2 (tree exp)
3860 enum tree_code code;
3862 if (TREE_SIDE_EFFECTS (exp)
3863 || tree_could_trap_p (exp))
3864 return false;
3866 while (CONVERT_EXPR_P (exp))
3867 exp = TREE_OPERAND (exp, 0);
3869 code = TREE_CODE (exp);
3871 if (TREE_CODE_CLASS (code) == tcc_comparison)
3872 return (simple_operand_p (TREE_OPERAND (exp, 0))
3873 && simple_operand_p (TREE_OPERAND (exp, 1)));
3875 if (code == TRUTH_NOT_EXPR)
3876 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3878 return simple_operand_p (exp);
3882 /* The following functions are subroutines to fold_range_test and allow it to
3883 try to change a logical combination of comparisons into a range test.
3885 For example, both
3886 X == 2 || X == 3 || X == 4 || X == 5
3888 X >= 2 && X <= 5
3889 are converted to
3890 (unsigned) (X - 2) <= 3
3892 We describe each set of comparisons as being either inside or outside
3893 a range, using a variable named like IN_P, and then describe the
3894 range with a lower and upper bound. If one of the bounds is omitted,
3895 it represents either the highest or lowest value of the type.
3897 In the comments below, we represent a range by two numbers in brackets
3898 preceded by a "+" to designate being inside that range, or a "-" to
3899 designate being outside that range, so the condition can be inverted by
3900 flipping the prefix. An omitted bound is represented by a "-". For
3901 example, "- [-, 10]" means being outside the range starting at the lowest
3902 possible value and ending at 10, in other words, being greater than 10.
3903 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3904 always false.
3906 We set up things so that the missing bounds are handled in a consistent
3907 manner so neither a missing bound nor "true" and "false" need to be
3908 handled using a special case. */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      /* Only a fully-folded constant answer is useful here.  */
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
3976 /* Helper routine for make_range. Perform one step for it, return
3977 new expression if the loop should continue or NULL_TREE if it should
3978 stop. */
3980 tree
3981 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3982 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3983 bool *strict_overflow_p)
3985 tree arg0_type = TREE_TYPE (arg0);
3986 tree n_low, n_high, low = *p_low, high = *p_high;
3987 int in_p = *p_in_p, n_in_p;
3989 switch (code)
3991 case TRUTH_NOT_EXPR:
3992 /* We can only do something if the range is testing for zero. */
3993 if (low == NULL_TREE || high == NULL_TREE
3994 || ! integer_zerop (low) || ! integer_zerop (high))
3995 return NULL_TREE;
3996 *p_in_p = ! in_p;
3997 return arg0;
3999 case EQ_EXPR: case NE_EXPR:
4000 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4001 /* We can only do something if the range is testing for zero
4002 and if the second operand is an integer constant. Note that
4003 saying something is "in" the range we make is done by
4004 complementing IN_P since it will set in the initial case of
4005 being not equal to zero; "out" is leaving it alone. */
4006 if (low == NULL_TREE || high == NULL_TREE
4007 || ! integer_zerop (low) || ! integer_zerop (high)
4008 || TREE_CODE (arg1) != INTEGER_CST)
4009 return NULL_TREE;
4011 switch (code)
4013 case NE_EXPR: /* - [c, c] */
4014 low = high = arg1;
4015 break;
4016 case EQ_EXPR: /* + [c, c] */
4017 in_p = ! in_p, low = high = arg1;
4018 break;
4019 case GT_EXPR: /* - [-, c] */
4020 low = 0, high = arg1;
4021 break;
4022 case GE_EXPR: /* + [c, -] */
4023 in_p = ! in_p, low = arg1, high = 0;
4024 break;
4025 case LT_EXPR: /* - [c, -] */
4026 low = arg1, high = 0;
4027 break;
4028 case LE_EXPR: /* + [-, c] */
4029 in_p = ! in_p, low = 0, high = arg1;
4030 break;
4031 default:
4032 gcc_unreachable ();
4035 /* If this is an unsigned comparison, we also know that EXP is
4036 greater than or equal to zero. We base the range tests we make
4037 on that fact, so we record it here so we can parse existing
4038 range tests. We test arg0_type since often the return type
4039 of, e.g. EQ_EXPR, is boolean. */
4040 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4042 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4043 in_p, low, high, 1,
4044 build_int_cst (arg0_type, 0),
4045 NULL_TREE))
4046 return NULL_TREE;
4048 in_p = n_in_p, low = n_low, high = n_high;
4050 /* If the high bound is missing, but we have a nonzero low
4051 bound, reverse the range so it goes from zero to the low bound
4052 minus 1. */
4053 if (high == 0 && low && ! integer_zerop (low))
4055 in_p = ! in_p;
4056 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4057 integer_one_node, 0);
4058 low = build_int_cst (arg0_type, 0);
4062 *p_low = low;
4063 *p_high = high;
4064 *p_in_p = in_p;
4065 return arg0;
4067 case NEGATE_EXPR:
4068 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4069 low and high are non-NULL, then normalize will DTRT. */
4070 if (!TYPE_UNSIGNED (arg0_type)
4071 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4073 if (low == NULL_TREE)
4074 low = TYPE_MIN_VALUE (arg0_type);
4075 if (high == NULL_TREE)
4076 high = TYPE_MAX_VALUE (arg0_type);
4079 /* (-x) IN [a,b] -> x in [-b, -a] */
4080 n_low = range_binop (MINUS_EXPR, exp_type,
4081 build_int_cst (exp_type, 0),
4082 0, high, 1);
4083 n_high = range_binop (MINUS_EXPR, exp_type,
4084 build_int_cst (exp_type, 0),
4085 0, low, 0);
4086 if (n_high != 0 && TREE_OVERFLOW (n_high))
4087 return NULL_TREE;
4088 goto normalize;
4090 case BIT_NOT_EXPR:
4091 /* ~ X -> -X - 1 */
4092 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4093 build_int_cst (exp_type, 1));
4095 case PLUS_EXPR:
4096 case MINUS_EXPR:
4097 if (TREE_CODE (arg1) != INTEGER_CST)
4098 return NULL_TREE;
4100 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4101 move a constant to the other side. */
4102 if (!TYPE_UNSIGNED (arg0_type)
4103 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4104 return NULL_TREE;
4106 /* If EXP is signed, any overflow in the computation is undefined,
4107 so we don't worry about it so long as our computations on
4108 the bounds don't overflow. For unsigned, overflow is defined
4109 and this is exactly the right thing. */
4110 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4111 arg0_type, low, 0, arg1, 0);
4112 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4113 arg0_type, high, 1, arg1, 0);
4114 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4115 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4116 return NULL_TREE;
4118 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4119 *strict_overflow_p = true;
4121 normalize:
4122 /* Check for an unsigned range which has wrapped around the maximum
4123 value thus making n_high < n_low, and normalize it. */
4124 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4126 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4127 integer_one_node, 0);
4128 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4129 integer_one_node, 0);
4131 /* If the range is of the form +/- [ x+1, x ], we won't
4132 be able to normalize it. But then, it represents the
4133 whole range or the empty set, so make it
4134 +/- [ -, - ]. */
4135 if (tree_int_cst_equal (n_low, low)
4136 && tree_int_cst_equal (n_high, high))
4137 low = high = 0;
4138 else
4139 in_p = ! in_p;
4141 else
4142 low = n_low, high = n_high;
4144 *p_low = low;
4145 *p_high = high;
4146 *p_in_p = in_p;
4147 return arg0;
4149 CASE_CONVERT:
4150 case NON_LVALUE_EXPR:
4151 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4152 return NULL_TREE;
4154 if (! INTEGRAL_TYPE_P (arg0_type)
4155 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4156 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4157 return NULL_TREE;
4159 n_low = low, n_high = high;
4161 if (n_low != 0)
4162 n_low = fold_convert_loc (loc, arg0_type, n_low);
4164 if (n_high != 0)
4165 n_high = fold_convert_loc (loc, arg0_type, n_high);
4167 /* If we're converting arg0 from an unsigned type, to exp,
4168 a signed type, we will be doing the comparison as unsigned.
4169 The tests above have already verified that LOW and HIGH
4170 are both positive.
4172 So we have to ensure that we will handle large unsigned
4173 values the same way that the current signed bounds treat
4174 negative values. */
4176 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4178 tree high_positive;
4179 tree equiv_type;
4180 /* For fixed-point modes, we need to pass the saturating flag
4181 as the 2nd parameter. */
4182 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4183 equiv_type
4184 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4185 TYPE_SATURATING (arg0_type));
4186 else
4187 equiv_type
4188 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4190 /* A range without an upper bound is, naturally, unbounded.
4191 Since convert would have cropped a very large value, use
4192 the max value for the destination type. */
4193 high_positive
4194 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4195 : TYPE_MAX_VALUE (arg0_type);
4197 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4198 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4199 fold_convert_loc (loc, arg0_type,
4200 high_positive),
4201 build_int_cst (arg0_type, 1));
4203 /* If the low bound is specified, "and" the range with the
4204 range for which the original unsigned value will be
4205 positive. */
4206 if (low != 0)
4208 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4209 1, fold_convert_loc (loc, arg0_type,
4210 integer_zero_node),
4211 high_positive))
4212 return NULL_TREE;
4214 in_p = (n_in_p == in_p);
4216 else
4218 /* Otherwise, "or" the range with the range of the input
4219 that will be interpreted as negative. */
4220 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4221 1, fold_convert_loc (loc, arg0_type,
4222 integer_zero_node),
4223 high_positive))
4224 return NULL_TREE;
4226 in_p = (in_p != n_in_p);
4230 *p_low = n_low;
4231 *p_high = n_high;
4232 *p_in_p = in_p;
4233 return arg0;
4235 default:
4236 return NULL_TREE;
4240 /* Given EXP, a logical expression, set the range it is testing into
4241 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4242 actually being tested. *PLOW and *PHIGH will be made of the same
4243 type as the returned expression. If EXP is not a comparison, we
4244 will most likely not be returning a useful value and range. Set
4245 *STRICT_OVERFLOW_P to true if the return value is only valid
4246 because signed overflow is undefined; otherwise, do not change
4247 *STRICT_OVERFLOW_P. */
4249 tree
4250 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4251 bool *strict_overflow_p)
4253 enum tree_code code;
4254 tree arg0, arg1 = NULL_TREE;
4255 tree exp_type, nexp;
4256 int in_p;
4257 tree low, high;
4258 location_t loc = EXPR_LOCATION (exp);
4260 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4261 and see if we can refine the range. Some of the cases below may not
4262 happen, but it doesn't seem worth worrying about this. We "continue"
4263 the outer loop when we've changed something; otherwise we "break"
4264 the switch, which will "break" the while. */
4266 in_p = 0;
4267 low = high = build_int_cst (TREE_TYPE (exp), 0);
/* Peel one operation off EXP per iteration via make_range_step, updating
   LOW/HIGH/IN_P to describe the equivalent range test on the operand,
   until no further refinement is possible. */
4269 while (1)
4271 code = TREE_CODE (exp);
4272 exp_type = TREE_TYPE (exp);
4273 arg0 = NULL_TREE;
4275 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4277 if (TREE_OPERAND_LENGTH (exp) > 0)
4278 arg0 = TREE_OPERAND (exp, 0);
4279 if (TREE_CODE_CLASS (code) == tcc_binary
4280 || TREE_CODE_CLASS (code) == tcc_comparison
4281 || (TREE_CODE_CLASS (code) == tcc_expression
4282 && TREE_OPERAND_LENGTH (exp) > 1))
4283 arg1 = TREE_OPERAND (exp, 1);
/* Codes with no first operand (constants, declarations) cannot be
   stepped through. */
4285 if (arg0 == NULL_TREE)
4286 break;
4288 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4289 &high, &in_p, strict_overflow_p);
4290 if (nexp == NULL_TREE)
4291 break;
4292 exp = nexp;
4295 /* If EXP is a constant, we can evaluate whether this is true or false. */
4296 if (TREE_CODE (exp) == INTEGER_CST)
4298 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4299 exp, 0, low, 0))
4300 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4301 exp, 1, high, 1)));
4302 low = high = 0;
4303 exp = 0;
4306 *pin_p = in_p, *plow = low, *phigh = high;
4307 return exp;
4310 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4311 type, TYPE, return an expression to test if EXP is in (or out of, depending
4312 on IN_P) the range. Return 0 if the test couldn't be created. */
4314 tree
4315 build_range_check (location_t loc, tree type, tree exp, int in_p,
4316 tree low, tree high)
4318 tree etype = TREE_TYPE (exp), value;
4320 #ifdef HAVE_canonicalize_funcptr_for_compare
4321 /* Disable this optimization for function pointer expressions
4322 on targets that require function pointer canonicalization. */
4323 if (HAVE_canonicalize_funcptr_for_compare
4324 && TREE_CODE (etype) == POINTER_TYPE
4325 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4326 return NULL_TREE;
4327 #endif
/* An "out of range" test is built as the inversion of the corresponding
   "in range" test. */
4329 if (! in_p)
4331 value = build_range_check (loc, type, exp, 1, low, high);
4332 if (value != 0)
4333 return invert_truthvalue_loc (loc, value);
4335 return 0;
/* No bounds at all: the test is always true; keep EXP only for its
   side effects. */
4338 if (low == 0 && high == 0)
4339 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4341 if (low == 0)
4342 return fold_build2_loc (loc, LE_EXPR, type, exp,
4343 fold_convert_loc (loc, etype, high));
4345 if (high == 0)
4346 return fold_build2_loc (loc, GE_EXPR, type, exp,
4347 fold_convert_loc (loc, etype, low));
4349 if (operand_equal_p (low, high, 0))
4350 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4351 fold_convert_loc (loc, etype, low));
/* [0, HIGH] in a signed type: redo the check in the unsigned variant of
   the type so a single unsigned LE comparison also rejects negatives. */
4353 if (integer_zerop (low))
4355 if (! TYPE_UNSIGNED (etype))
4357 etype = unsigned_type_for (etype);
4358 high = fold_convert_loc (loc, etype, high);
4359 exp = fold_convert_loc (loc, etype, exp);
4361 return build_range_check (loc, type, exp, 1, 0, high);
4364 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4365 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4367 unsigned HOST_WIDE_INT lo;
4368 HOST_WIDE_INT hi;
4369 int prec;
4371 prec = TYPE_PRECISION (etype);
4372 if (prec <= HOST_BITS_PER_WIDE_INT)
4374 hi = 0;
4375 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4377 else
4379 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4380 lo = HOST_WIDE_INT_M1U;
4383 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4385 if (TYPE_UNSIGNED (etype))
4387 tree signed_etype = signed_type_for (etype);
4388 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4389 etype
4390 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4391 else
4392 etype = signed_etype;
4393 exp = fold_convert_loc (loc, etype, exp);
4395 return fold_build2_loc (loc, GT_EXPR, type, exp,
4396 build_int_cst (etype, 0));
4400 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4401 This requires wrap-around arithmetics for the type of the expression.
4402 First make sure that arithmetics in this type is valid, then make sure
4403 that it wraps around. */
4404 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4405 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4406 TYPE_UNSIGNED (etype));
4408 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4410 tree utype, minv, maxv;
4412 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4413 for the type in question, as we rely on this here. */
4414 utype = unsigned_type_for (etype);
4415 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4416 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4417 integer_one_node, 1);
4418 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4420 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4421 minv, 1, maxv, 1)))
4422 etype = utype;
4423 else
4424 return 0;
4427 high = fold_convert_loc (loc, etype, high);
4428 low = fold_convert_loc (loc, etype, low);
4429 exp = fold_convert_loc (loc, etype, exp);
4431 value = const_binop (MINUS_EXPR, high, low);
/* For pointers the subtraction of LOW must be expressed as a
   pointer-plus of the negated low bound. */
4434 if (POINTER_TYPE_P (etype))
4436 if (value != 0 && !TREE_OVERFLOW (value))
4438 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4439 return build_range_check (loc, type,
4440 fold_build_pointer_plus_loc (loc, exp, low),
4441 1, build_int_cst (etype, 0), value);
4443 return 0;
4446 if (value != 0 && !TREE_OVERFLOW (value))
4447 return build_range_check (loc, type,
4448 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4449 1, build_int_cst (etype, 0), value);
4451 return 0;
4454 /* Return the predecessor of VAL in its type, handling the infinite case. */
4456 static tree
4457 range_predecessor (tree val)
4459 tree type = TREE_TYPE (val);
4461 if (INTEGRAL_TYPE_P (type)
4462 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4463 return 0;
4464 else
4465 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4468 /* Return the successor of VAL in its type, handling the infinite case. */
4470 static tree
4471 range_successor (tree val)
4473 tree type = TREE_TYPE (val);
4475 if (INTEGRAL_TYPE_P (type)
4476 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4477 return 0;
4478 else
4479 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4482 /* Given two ranges, see if we can merge them into one. Return 1 if we
4483 can, 0 if we can't. Set the output range into the specified parameters. */
4485 bool
4486 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4487 tree high0, int in1_p, tree low1, tree high1)
4489 int no_overlap;
4490 int subset;
4491 int temp;
4492 tree tem;
4493 int in_p;
4494 tree low, high;
/* Whether the two ranges share the same lower / upper bound; a pair of
   null bounds (both unbounded) also counts as equal. */
4495 int lowequal = ((low0 == 0 && low1 == 0)
4496 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4497 low0, 0, low1, 0)));
4498 int highequal = ((high0 == 0 && high1 == 0)
4499 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4500 high0, 1, high1, 1)));
4502 /* Make range 0 be the range that starts first, or ends last if they
4503 start at the same value. Swap them if it isn't. */
4504 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4505 low0, 0, low1, 0))
4506 || (lowequal
4507 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4508 high1, 1, high0, 1))))
4510 temp = in0_p, in0_p = in1_p, in1_p = temp;
4511 tem = low0, low0 = low1, low1 = tem;
4512 tem = high0, high0 = high1, high1 = tem;
4515 /* Now flag two cases, whether the ranges are disjoint or whether the
4516 second range is totally subsumed in the first. Note that the tests
4517 below are simplified by the ones above. */
4518 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4519 high0, 1, low1, 0));
4520 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4521 high1, 1, high0, 1));
4523 /* We now have four cases, depending on whether we are including or
4524 excluding the two ranges. */
4525 if (in0_p && in1_p)
4527 /* If they don't overlap, the result is false. If the second range
4528 is a subset it is the result. Otherwise, the range is from the start
4529 of the second to the end of the first. */
4530 if (no_overlap)
4531 in_p = 0, low = high = 0;
4532 else if (subset)
4533 in_p = 1, low = low1, high = high1;
4534 else
4535 in_p = 1, low = low1, high = high0;
4538 else if (in0_p && ! in1_p)
4540 /* If they don't overlap, the result is the first range. If they are
4541 equal, the result is false. If the second range is a subset of the
4542 first, and the ranges begin at the same place, we go from just after
4543 the end of the second range to the end of the first. If the second
4544 range is not a subset of the first, or if it is a subset and both
4545 ranges end at the same place, the range starts at the start of the
4546 first range and ends just before the second range.
4547 Otherwise, we can't describe this as a single range. */
4548 if (no_overlap)
4549 in_p = 1, low = low0, high = high0;
4550 else if (lowequal && highequal)
4551 in_p = 0, low = high = 0;
4552 else if (subset && lowequal)
4554 low = range_successor (high1);
4555 high = high0;
4556 in_p = 1;
4557 if (low == 0)
4559 /* We are in the weird situation where high0 > high1 but
4560 high1 has no successor. Punt. */
4561 return 0;
4564 else if (! subset || highequal)
4566 low = low0;
4567 high = range_predecessor (low1);
4568 in_p = 1;
4569 if (high == 0)
4571 /* low0 < low1 but low1 has no predecessor. Punt. */
4572 return 0;
4575 else
4576 return 0;
4579 else if (! in0_p && in1_p)
4581 /* If they don't overlap, the result is the second range. If the second
4582 is a subset of the first, the result is false. Otherwise,
4583 the range starts just after the first range and ends at the
4584 end of the second. */
4585 if (no_overlap)
4586 in_p = 1, low = low1, high = high1;
4587 else if (subset || highequal)
4588 in_p = 0, low = high = 0;
4589 else
4591 low = range_successor (high0);
4592 high = high1;
4593 in_p = 1;
4594 if (low == 0)
4596 /* high1 > high0 but high0 has no successor. Punt. */
4597 return 0;
4602 else
4604 /* The case where we are excluding both ranges. Here the complex case
4605 is if they don't overlap. In that case, the only time we have a
4606 range is if they are adjacent. If the second is a subset of the
4607 first, the result is the first. Otherwise, the range to exclude
4608 starts at the beginning of the first range and ends at the end of the
4609 second. */
4610 if (no_overlap)
4612 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4613 range_successor (high0),
4614 1, low1, 0)))
4615 in_p = 0, low = low0, high = high1;
4616 else
4618 /* Canonicalize - [min, x] into - [-, x]. */
4619 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4620 switch (TREE_CODE (TREE_TYPE (low0)))
4622 case ENUMERAL_TYPE:
4623 if (TYPE_PRECISION (TREE_TYPE (low0))
4624 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4625 break;
4626 /* FALLTHROUGH */
4627 case INTEGER_TYPE:
4628 if (tree_int_cst_equal (low0,
4629 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4630 low0 = 0;
4631 break;
4632 case POINTER_TYPE:
4633 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4634 && integer_zerop (low0))
4635 low0 = 0;
4636 break;
4637 default:
4638 break;
4641 /* Canonicalize - [x, max] into - [x, -]. */
4642 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4643 switch (TREE_CODE (TREE_TYPE (high1)))
4645 case ENUMERAL_TYPE:
4646 if (TYPE_PRECISION (TREE_TYPE (high1))
4647 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4648 break;
4649 /* FALLTHROUGH */
4650 case INTEGER_TYPE:
4651 if (tree_int_cst_equal (high1,
4652 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4653 high1 = 0;
4654 break;
4655 case POINTER_TYPE:
4656 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4657 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4658 high1, 1,
4659 integer_one_node, 1)))
4660 high1 = 0;
4661 break;
4662 default:
4663 break;
4666 /* The ranges might be also adjacent between the maximum and
4667 minimum values of the given type. For
4668 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4669 return + [x + 1, y - 1]. */
4670 if (low0 == 0 && high1 == 0)
4672 low = range_successor (high0);
4673 high = range_predecessor (low1);
4674 if (low == 0 || high == 0)
4675 return 0;
4677 in_p = 1;
4679 else
4680 return 0;
4683 else if (subset)
4684 in_p = 0, low = low0, high = high0;
4685 else
4686 in_p = 0, low = low0, high = high1;
4689 *pin_p = in_p, *plow = low, *phigh = high;
4690 return 1;
4694 /* Subroutine of fold, looking inside expressions of the form
4695 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4696 of the COND_EXPR. This function is being used also to optimize
4697 A op B ? C : A, by reversing the comparison first.
4699 Return a folded expression whose code is not a COND_EXPR
4700 anymore, or NULL_TREE if no folding opportunity is found. */
4702 static tree
4703 fold_cond_expr_with_comparison (location_t loc, tree type,
4704 tree arg0, tree arg1, tree arg2)
4706 enum tree_code comp_code = TREE_CODE (arg0);
4707 tree arg00 = TREE_OPERAND (arg0, 0);
4708 tree arg01 = TREE_OPERAND (arg0, 1);
4709 tree arg1_type = TREE_TYPE (arg1);
4710 tree tem;
/* Look through conversions on the arms so the structural patterns
   below can match. */
4712 STRIP_NOPS (arg1);
4713 STRIP_NOPS (arg2);
4715 /* If we have A op 0 ? A : -A, consider applying the following
4716 transformations:
4718 A == 0? A : -A same as -A
4719 A != 0? A : -A same as A
4720 A >= 0? A : -A same as abs (A)
4721 A > 0? A : -A same as abs (A)
4722 A <= 0? A : -A same as -abs (A)
4723 A < 0? A : -A same as -abs (A)
4725 None of these transformations work for modes with signed
4726 zeros. If A is +/-0, the first two transformations will
4727 change the sign of the result (from +0 to -0, or vice
4728 versa). The last four will fix the sign of the result,
4729 even though the original expressions could be positive or
4730 negative, depending on the sign of A.
4732 Note that all these transformations are correct if A is
4733 NaN, since the two alternatives (A and -A) are also NaNs. */
4734 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4735 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4736 ? real_zerop (arg01)
4737 : integer_zerop (arg01))
4738 && ((TREE_CODE (arg2) == NEGATE_EXPR
4739 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4740 /* In the case that A is of the form X-Y, '-A' (arg2) may
4741 have already been folded to Y-X, check for that. */
4742 || (TREE_CODE (arg1) == MINUS_EXPR
4743 && TREE_CODE (arg2) == MINUS_EXPR
4744 && operand_equal_p (TREE_OPERAND (arg1, 0),
4745 TREE_OPERAND (arg2, 1), 0)
4746 && operand_equal_p (TREE_OPERAND (arg1, 1),
4747 TREE_OPERAND (arg2, 0), 0))))
4748 switch (comp_code)
4750 case EQ_EXPR:
4751 case UNEQ_EXPR:
4752 tem = fold_convert_loc (loc, arg1_type, arg1);
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type,
4755 negate_expr (tem)));
4756 case NE_EXPR:
4757 case LTGT_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4759 case UNGE_EXPR:
4760 case UNGT_EXPR:
4761 if (flag_trapping_math)
4762 break;
4763 /* Fall through. */
4764 case GE_EXPR:
4765 case GT_EXPR:
4766 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4767 arg1 = fold_convert_loc (loc, signed_type_for
4768 (TREE_TYPE (arg1)), arg1);
4769 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4770 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4771 case UNLE_EXPR:
4772 case UNLT_EXPR:
4773 if (flag_trapping_math)
4774 break;
/* Fall through. */
4775 case LE_EXPR:
4776 case LT_EXPR:
4777 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4778 arg1 = fold_convert_loc (loc, signed_type_for
4779 (TREE_TYPE (arg1)), arg1);
4780 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4781 return negate_expr (fold_convert_loc (loc, type, tem));
4782 default:
4783 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4784 break;
4787 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4788 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4789 both transformations are correct when A is NaN: A != 0
4790 is then true, and A == 0 is false. */
4792 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4793 && integer_zerop (arg01) && integer_zerop (arg2))
4795 if (comp_code == NE_EXPR)
4796 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4797 else if (comp_code == EQ_EXPR)
4798 return build_zero_cst (type);
4801 /* Try some transformations of A op B ? A : B.
4803 A == B? A : B same as B
4804 A != B? A : B same as A
4805 A >= B? A : B same as max (A, B)
4806 A > B? A : B same as max (B, A)
4807 A <= B? A : B same as min (A, B)
4808 A < B? A : B same as min (B, A)
4810 As above, these transformations don't work in the presence
4811 of signed zeros. For example, if A and B are zeros of
4812 opposite sign, the first two transformations will change
4813 the sign of the result. In the last four, the original
4814 expressions give different results for (A=+0, B=-0) and
4815 (A=-0, B=+0), but the transformed expressions do not.
4817 The first two transformations are correct if either A or B
4818 is a NaN. In the first transformation, the condition will
4819 be false, and B will indeed be chosen. In the case of the
4820 second transformation, the condition A != B will be true,
4821 and A will be chosen.
4823 The conversions to max() and min() are not correct if B is
4824 a number and A is not. The conditions in the original
4825 expressions will be false, so all four give B. The min()
4826 and max() versions would give a NaN instead. */
4827 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4828 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4829 /* Avoid these transformations if the COND_EXPR may be used
4830 as an lvalue in the C++ front-end. PR c++/19199. */
4831 && (in_gimple_form
4832 || VECTOR_TYPE_P (type)
4833 || (strcmp (lang_hooks.name, "GNU C++") != 0
4834 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4835 || ! maybe_lvalue_p (arg1)
4836 || ! maybe_lvalue_p (arg2)))
4838 tree comp_op0 = arg00;
4839 tree comp_op1 = arg01;
4840 tree comp_type = TREE_TYPE (comp_op0);
4842 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4843 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4845 comp_type = type;
4846 comp_op0 = arg1;
4847 comp_op1 = arg2;
4850 switch (comp_code)
4852 case EQ_EXPR:
4853 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4854 case NE_EXPR:
4855 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4856 case LE_EXPR:
4857 case LT_EXPR:
4858 case UNLE_EXPR:
4859 case UNLT_EXPR:
4860 /* In C++ a ?: expression can be an lvalue, so put the
4861 operand which will be used if they are equal first
4862 so that we can convert this back to the
4863 corresponding COND_EXPR. */
4864 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4866 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4867 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4868 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4869 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4870 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4871 comp_op1, comp_op0);
4872 return pedantic_non_lvalue_loc (loc,
4873 fold_convert_loc (loc, type, tem));
4875 break;
4876 case GE_EXPR:
4877 case GT_EXPR:
4878 case UNGE_EXPR:
4879 case UNGT_EXPR:
4880 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4882 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4883 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4884 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4885 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4886 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4887 comp_op1, comp_op0);
4888 return pedantic_non_lvalue_loc (loc,
4889 fold_convert_loc (loc, type, tem));
4891 break;
4892 case UNEQ_EXPR:
4893 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4894 return pedantic_non_lvalue_loc (loc,
4895 fold_convert_loc (loc, type, arg2));
4896 break;
4897 case LTGT_EXPR:
4898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4899 return pedantic_non_lvalue_loc (loc,
4900 fold_convert_loc (loc, type, arg1));
4901 break;
4902 default:
4903 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4904 break;
4908 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4909 we might still be able to simplify this. For example,
4910 if C1 is one less or one more than C2, this might have started
4911 out as a MIN or MAX and been transformed by this function.
4912 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4914 if (INTEGRAL_TYPE_P (type)
4915 && TREE_CODE (arg01) == INTEGER_CST
4916 && TREE_CODE (arg2) == INTEGER_CST)
4917 switch (comp_code)
4919 case EQ_EXPR:
4920 if (TREE_CODE (arg1) == INTEGER_CST)
4921 break;
4922 /* We can replace A with C1 in this case. */
4923 arg1 = fold_convert_loc (loc, type, arg01);
4924 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4926 case LT_EXPR:
4927 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4928 MIN_EXPR, to preserve the signedness of the comparison. */
4929 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (PLUS_EXPR, arg2,
4933 build_int_cst (type, 1)),
4934 OEP_ONLY_CONST))
4936 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4937 fold_convert_loc (loc, TREE_TYPE (arg00),
4938 arg2));
4939 return pedantic_non_lvalue_loc (loc,
4940 fold_convert_loc (loc, type, tem));
4942 break;
4944 case LE_EXPR:
4945 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4946 as above. */
4947 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4948 OEP_ONLY_CONST)
4949 && operand_equal_p (arg01,
4950 const_binop (MINUS_EXPR, arg2,
4951 build_int_cst (type, 1)),
4952 OEP_ONLY_CONST))
4954 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4955 fold_convert_loc (loc, TREE_TYPE (arg00),
4956 arg2));
4957 return pedantic_non_lvalue_loc (loc,
4958 fold_convert_loc (loc, type, tem));
4960 break;
4962 case GT_EXPR:
4963 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4964 MAX_EXPR, to preserve the signedness of the comparison. */
4965 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4966 OEP_ONLY_CONST)
4967 && operand_equal_p (arg01,
4968 const_binop (MINUS_EXPR, arg2,
4969 build_int_cst (type, 1)),
4970 OEP_ONLY_CONST))
4972 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4973 fold_convert_loc (loc, TREE_TYPE (arg00),
4974 arg2));
4975 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4977 break;
4979 case GE_EXPR:
4980 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4981 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4982 OEP_ONLY_CONST)
4983 && operand_equal_p (arg01,
4984 const_binop (PLUS_EXPR, arg2,
4985 build_int_cst (type, 1)),
4986 OEP_ONLY_CONST))
4988 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4989 fold_convert_loc (loc, TREE_TYPE (arg00),
4990 arg2));
4991 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4993 break;
4994 case NE_EXPR:
4995 break;
4996 default:
4997 gcc_unreachable ();
5000 return NULL_TREE;
/* Nonzero if, on this target, it is cheaper to evaluate both operands
   of a short-circuit logical operation unconditionally than to branch;
   derived from BRANCH_COST unless the target provides its own
   definition. */
5005 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5006 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5007 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5008 false) >= 2)
5009 #endif
5011 /* EXP is some logical combination of boolean tests. See if we can
5012 merge it into some range test. Return the new tree if so. */
5014 static tree
5015 fold_range_test (location_t loc, enum tree_code code, tree type,
5016 tree op0, tree op1)
5018 int or_op = (code == TRUTH_ORIF_EXPR
5019 || code == TRUTH_OR_EXPR);
5020 int in0_p, in1_p, in_p;
5021 tree low0, low1, low, high0, high1, high;
5022 bool strict_overflow_p = false;
5023 tree tem, lhs, rhs;
5024 const char * const warnmsg = G_("assuming signed overflow does not occur "
5025 "when simplifying range test");
5027 if (!INTEGRAL_TYPE_P (type))
5028 return 0;
/* Compute the value range each operand tests for. */
5030 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5031 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5033 /* If this is an OR operation, invert both sides; we will invert
5034 again at the end. */
5035 if (or_op)
5036 in0_p = ! in0_p, in1_p = ! in1_p;
5038 /* If both expressions are the same, if we can merge the ranges, and we
5039 can build the range test, return it or it inverted. If one of the
5040 ranges is always true or always false, consider it to be the same
5041 expression as the other. */
5042 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5043 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5044 in1_p, low1, high1)
5045 && 0 != (tem = (build_range_check (loc, type,
5046 lhs != 0 ? lhs
5047 : rhs != 0 ? rhs : integer_zero_node,
5048 in_p, low, high))))
5050 if (strict_overflow_p)
5051 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5052 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5055 /* On machines where the branch cost is expensive, if this is a
5056 short-circuited branch and the underlying object on both sides
5057 is the same, make a non-short-circuit operation. */
5058 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5059 && lhs != 0 && rhs != 0
5060 && (code == TRUTH_ANDIF_EXPR
5061 || code == TRUTH_ORIF_EXPR)
5062 && operand_equal_p (lhs, rhs, 0))
5064 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5065 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5066 which cases we can't do this. */
5067 if (simple_operand_p (lhs))
5068 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5069 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5070 type, op0, op1);
5072 else if (!lang_hooks.decls.global_bindings_p ()
5073 && !CONTAINS_PLACEHOLDER_P (lhs))
5075 tree common = save_expr (lhs);
5077 if (0 != (lhs = build_range_check (loc, type, common,
5078 or_op ? ! in0_p : in0_p,
5079 low0, high0))
5080 && (0 != (rhs = build_range_check (loc, type, common,
5081 or_op ? ! in1_p : in1_p,
5082 low1, high1))))
5084 if (strict_overflow_p)
5085 fold_overflow_warning (warnmsg,
5086 WARN_STRICT_OVERFLOW_COMPARISON);
5087 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5088 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5089 type, lhs, rhs);
5094 return 0;
5097 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5098 bit value. Arrange things so the extra bits will be set to zero if and
5099 only if C is signed-extended to its full width. If MASK is nonzero,
5100 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5102 static tree
5103 unextend (tree c, int p, int unsignedp, tree mask)
5105 tree type = TREE_TYPE (c);
5106 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5107 tree temp;
/* Nothing to do when C already fills the whole mode or is to be
   zero-extended: the extra bits are defined to be zero. */
5109 if (p == modesize || unsignedp)
5110 return c;
5112 /* We work by getting just the sign bit into the low-order bit, then
5113 into the high-order bit, then sign-extend. We then XOR that value
5114 with C. */
5115 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5116 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5118 /* We must use a signed type in order to get an arithmetic right shift.
5119 However, we must also avoid introducing accidental overflows, so that
5120 a subsequent call to integer_zerop will work. Hence we must
5121 do the type conversion here. At this point, the constant is either
5122 zero or one, and the conversion to a signed type can never overflow.
5123 We could get an overflow if this conversion is done anywhere else. */
5124 if (TYPE_UNSIGNED (type))
5125 temp = fold_convert (signed_type_for (type), temp);
5127 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5128 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5129 if (mask != 0)
5130 temp = const_binop (BIT_AND_EXPR, temp,
5131 fold_convert (TREE_TYPE (c), mask));
5132 /* If necessary, convert the type back to match the type of C. */
5133 if (TYPE_UNSIGNED (type))
5134 temp = fold_convert (type, temp);
5136 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5139 /* For an expression that has the form
5140 (A && B) || ~B
5142 (A || B) && ~B,
5143 we can drop one of the inner expressions and simplify to
5144 A || ~B
5146 A && ~B
5147 LOC is the location of the resulting expression. OP is the inner
5148 logical operation; the left-hand side in the examples above, while CMPOP
5149 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5150 removing a condition that guards another, as in
5151 (A != NULL && A->...) || A == NULL
5152 which we must not transform. If RHS_ONLY is true, only eliminate the
5153 right-most operand of the inner logical operation. */
5155 static tree
5156 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5157 bool rhs_only)
5159 tree type = TREE_TYPE (cmpop);
5160 enum tree_code code = TREE_CODE (cmpop);
5161 enum tree_code truthop_code = TREE_CODE (op);
5162 tree lhs = TREE_OPERAND (op, 0);
5163 tree rhs = TREE_OPERAND (op, 1);
5164 tree orig_lhs = lhs, orig_rhs = rhs;
5165 enum tree_code rhs_code = TREE_CODE (rhs);
5166 enum tree_code lhs_code = TREE_CODE (lhs);
5167 enum tree_code inv_code;
/* Side effects could be duplicated or dropped by the rewrite; bail out. */
5169 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5170 return NULL_TREE;
5172 if (TREE_CODE_CLASS (code) != tcc_comparison)
5173 return NULL_TREE;
/* First recurse into operands of OP that use the same logical code,
   simplifying them against CMPOP before looking at this level. */
5175 if (rhs_code == truthop_code)
5177 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5178 if (newrhs != NULL_TREE)
5180 rhs = newrhs;
5181 rhs_code = TREE_CODE (rhs);
5184 if (lhs_code == truthop_code && !rhs_only)
5186 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5187 if (newlhs != NULL_TREE)
5189 lhs = newlhs;
5190 lhs_code = TREE_CODE (lhs);
/* If an operand of OP is exactly the inverse comparison of CMPOP
   (same operands, inverted code), that operand is redundant and the
   other operand alone is the simplified result. */
5194 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5195 if (inv_code == rhs_code
5196 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5197 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5198 return lhs;
5199 if (!rhs_only && inv_code == lhs_code
5200 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5201 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5202 return rhs;
/* If only an inner operand was simplified, rebuild OP with the new
   operands. */
5203 if (rhs != orig_rhs || lhs != orig_lhs)
5204 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5205 lhs, rhs);
5206 return NULL_TREE;
5209 /* Find ways of folding logical expressions of LHS and RHS:
5210 Try to merge two comparisons to the same innermost item.
5211 Look for range tests like "ch >= '0' && ch <= '9'".
5212 Look for combinations of simple terms on machines with expensive branches
5213 and evaluate the RHS unconditionally.
5215 For example, if we have p->a == 2 && p->b == 4 and we can make an
5216 object large enough to span both A and B, we can do this with a comparison
5217 against the object ANDed with the a mask.
5219 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5220 operations to do this with one comparison.
5222 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5223 function and the one above.
5225 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5226 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5228 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5229 two operands.
5231 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): lines carry fused web-viewer line numbers and brace-only
   lines were dropped by the extraction; code tokens are kept verbatim. */
5233 static tree
5234 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5235 tree lhs, tree rhs)
5237 /* If this is the "or" of two comparisons, we can do something if
5238 the comparisons are NE_EXPR. If this is the "and", we can do something
5239 if the comparisons are EQ_EXPR. I.e.,
5240 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5242 WANTED_CODE is this operation code. For single bit fields, we can
5243 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5244 comparison for one-bit fields. */
5246 enum tree_code wanted_code;
5247 enum tree_code lcode, rcode;
/* Naming scheme: the first letter selects the comparison (l = LHS, r = RHS
   of the logical op); the second selects its operand (l = left, r = right).
   E.g. "lr_arg" is the right operand of the left comparison. */
5248 tree ll_arg, lr_arg, rl_arg, rr_arg;
5249 tree ll_inner, lr_inner, rl_inner, rr_inner;
5250 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5251 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5252 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5253 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5254 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5255 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5256 enum machine_mode lnmode, rnmode;
5257 tree ll_mask, lr_mask, rl_mask, rr_mask;
5258 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5259 tree l_const, r_const;
5260 tree lntype, rntype, result;
5261 HOST_WIDE_INT first_bit, end_bit;
5262 int volatilep;
5264 /* Start by getting the comparison codes. Fail if anything is volatile.
5265 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5266 it were surrounded with a NE_EXPR. */
5268 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5269 return 0;
5271 lcode = TREE_CODE (lhs);
5272 rcode = TREE_CODE (rhs);
5274 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5276 lhs = build2 (NE_EXPR, truth_type, lhs,
5277 build_int_cst (TREE_TYPE (lhs), 0));
5278 lcode = NE_EXPR;
5281 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5283 rhs = build2 (NE_EXPR, truth_type, rhs,
5284 build_int_cst (TREE_TYPE (rhs), 0));
5285 rcode = NE_EXPR;
5288 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5289 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5290 return 0;
5292 ll_arg = TREE_OPERAND (lhs, 0);
5293 lr_arg = TREE_OPERAND (lhs, 1);
5294 rl_arg = TREE_OPERAND (rhs, 0);
5295 rr_arg = TREE_OPERAND (rhs, 1);
5297 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5298 if (simple_operand_p (ll_arg)
5299 && simple_operand_p (lr_arg))
5301 if (operand_equal_p (ll_arg, rl_arg, 0)
5302 && operand_equal_p (lr_arg, rr_arg, 0))
5304 result = combine_comparisons (loc, code, lcode, rcode,
5305 truth_type, ll_arg, lr_arg);
5306 if (result)
5307 return result;
/* Also try with the right comparison's operands swapped (and its code
   reversed to compensate). */
5309 else if (operand_equal_p (ll_arg, rr_arg, 0)
5310 && operand_equal_p (lr_arg, rl_arg, 0))
5312 result = combine_comparisons (loc, code, lcode,
5313 swap_tree_comparison (rcode),
5314 truth_type, ll_arg, lr_arg);
5315 if (result)
5316 return result;
/* From here on, treat the short-circuit forms like their unconditional
   counterparts. */
5320 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5321 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5323 /* If the RHS can be evaluated unconditionally and its operands are
5324 simple, it wins to evaluate the RHS unconditionally on machines
5325 with expensive branches. In this case, this isn't a comparison
5326 that can be merged. */
5328 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5329 false) >= 2
5330 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5331 && simple_operand_p (rl_arg)
5332 && simple_operand_p (rr_arg))
5334 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5335 if (code == TRUTH_OR_EXPR
5336 && lcode == NE_EXPR && integer_zerop (lr_arg)
5337 && rcode == NE_EXPR && integer_zerop (rr_arg)
5338 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5339 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5340 return build2_loc (loc, NE_EXPR, truth_type,
5341 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5342 ll_arg, rl_arg),
5343 build_int_cst (TREE_TYPE (ll_arg), 0));
5345 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5346 if (code == TRUTH_AND_EXPR
5347 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5348 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5349 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5350 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5351 return build2_loc (loc, EQ_EXPR, truth_type,
5352 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5353 ll_arg, rl_arg),
5354 build_int_cst (TREE_TYPE (ll_arg), 0));
5357 /* See if the comparisons can be merged. Then get all the parameters for
5358 each side. */
5360 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5361 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5362 return 0;
/* Decode each comparison operand as a (possibly masked) bit-field
   reference; any volatile access poisons the whole transform (checked
   below). */
5364 volatilep = 0;
5365 ll_inner = decode_field_reference (loc, ll_arg,
5366 &ll_bitsize, &ll_bitpos, &ll_mode,
5367 &ll_unsignedp, &volatilep, &ll_mask,
5368 &ll_and_mask);
5369 lr_inner = decode_field_reference (loc, lr_arg,
5370 &lr_bitsize, &lr_bitpos, &lr_mode,
5371 &lr_unsignedp, &volatilep, &lr_mask,
5372 &lr_and_mask);
5373 rl_inner = decode_field_reference (loc, rl_arg,
5374 &rl_bitsize, &rl_bitpos, &rl_mode,
5375 &rl_unsignedp, &volatilep, &rl_mask,
5376 &rl_and_mask);
5377 rr_inner = decode_field_reference (loc, rr_arg,
5378 &rr_bitsize, &rr_bitpos, &rr_mode,
5379 &rr_unsignedp, &volatilep, &rr_mask,
5380 &rr_and_mask);
5382 /* It must be true that the inner operation on the lhs of each
5383 comparison must be the same if we are to be able to do anything.
5384 Then see if we have constants. If not, the same must be true for
5385 the rhs's. */
5386 if (volatilep || ll_inner == 0 || rl_inner == 0
5387 || ! operand_equal_p (ll_inner, rl_inner, 0))
5388 return 0;
5390 if (TREE_CODE (lr_arg) == INTEGER_CST
5391 && TREE_CODE (rr_arg) == INTEGER_CST)
5392 l_const = lr_arg, r_const = rr_arg;
5393 else if (lr_inner == 0 || rr_inner == 0
5394 || ! operand_equal_p (lr_inner, rr_inner, 0))
5395 return 0;
5396 else
5397 l_const = r_const = 0;
5399 /* If either comparison code is not correct for our logical operation,
5400 fail. However, we can convert a one-bit comparison against zero into
5401 the opposite comparison against that bit being set in the field. */
5403 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5404 if (lcode != wanted_code)
5406 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5408 /* Make the left operand unsigned, since we are only interested
5409 in the value of one bit. Otherwise we are doing the wrong
5410 thing below. */
5411 ll_unsignedp = 1;
5412 l_const = ll_mask;
5414 else
5415 return 0;
5418 /* This is analogous to the code for l_const above. */
5419 if (rcode != wanted_code)
5421 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5423 rl_unsignedp = 1;
5424 r_const = rl_mask;
5426 else
5427 return 0;
5430 /* See if we can find a mode that contains both fields being compared on
5431 the left. If we can't, fail. Otherwise, update all constants and masks
5432 to be relative to a field of that size. */
5433 first_bit = MIN (ll_bitpos, rl_bitpos);
5434 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5435 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5436 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5437 volatilep);
5438 if (lnmode == VOIDmode)
5439 return 0;
5441 lnbitsize = GET_MODE_BITSIZE (lnmode);
5442 lnbitpos = first_bit & ~ (lnbitsize - 1);
5443 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
/* x*_bitpos values are bit positions relative to the start of the merged
   field, rather than the containing object. */
5444 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5446 if (BYTES_BIG_ENDIAN)
5448 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5449 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5452 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5453 size_int (xll_bitpos));
5454 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5455 size_int (xrl_bitpos));
/* Shift any constants into position; if a constant has bits set outside
   its field's mask, the comparison result is fixed and we can fold to a
   constant (with a diagnostic). */
5457 if (l_const)
5459 l_const = fold_convert_loc (loc, lntype, l_const);
5460 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5461 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5462 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5463 fold_build1_loc (loc, BIT_NOT_EXPR,
5464 lntype, ll_mask))))
5466 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5468 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5471 if (r_const)
5473 r_const = fold_convert_loc (loc, lntype, r_const);
5474 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5475 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5476 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5477 fold_build1_loc (loc, BIT_NOT_EXPR,
5478 lntype, rl_mask))))
5480 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5482 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5486 /* If the right sides are not constant, do the same for it. Also,
5487 disallow this optimization if a size or signedness mismatch occurs
5488 between the left and right sides. */
5489 if (l_const == 0)
5491 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5492 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5493 /* Make sure the two fields on the right
5494 correspond to the left without being swapped. */
5495 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5496 return 0;
5498 first_bit = MIN (lr_bitpos, rr_bitpos);
5499 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5500 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5501 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5502 volatilep);
5503 if (rnmode == VOIDmode)
5504 return 0;
5506 rnbitsize = GET_MODE_BITSIZE (rnmode);
5507 rnbitpos = first_bit & ~ (rnbitsize - 1);
5508 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5509 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5511 if (BYTES_BIG_ENDIAN)
5513 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5514 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5517 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5518 rntype, lr_mask),
5519 size_int (xlr_bitpos));
5520 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5521 rntype, rr_mask),
5522 size_int (xrr_bitpos));
5524 /* Make a mask that corresponds to both fields being compared.
5525 Do this for both items being compared. If the operands are the
5526 same size and the bits being compared are in the same position
5527 then we can do this by masking both and comparing the masked
5528 results. */
5529 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5530 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5531 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5533 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5534 ll_unsignedp || rl_unsignedp);
5535 if (! all_ones_mask_p (ll_mask, lnbitsize))
5536 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5538 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5539 lr_unsignedp || rr_unsignedp);
5540 if (! all_ones_mask_p (lr_mask, rnbitsize))
5541 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5543 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5546 /* There is still another way we can do something: If both pairs of
5547 fields being compared are adjacent, we may be able to make a wider
5548 field containing them both.
5550 Note that we still must mask the lhs/rhs expressions. Furthermore,
5551 the mask must be shifted to account for the shift done by
5552 make_bit_field_ref. */
5553 if ((ll_bitsize + ll_bitpos == rl_bitpos
5554 && lr_bitsize + lr_bitpos == rr_bitpos)
5555 || (ll_bitpos == rl_bitpos + rl_bitsize
5556 && lr_bitpos == rr_bitpos + rr_bitsize))
5558 tree type;
5560 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5561 ll_bitsize + rl_bitsize,
5562 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5563 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5564 lr_bitsize + rr_bitsize,
5565 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5567 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5568 size_int (MIN (xll_bitpos, xrl_bitpos)));
5569 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5570 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5572 /* Convert to the smaller type before masking out unwanted bits. */
5573 type = lntype;
5574 if (lntype != rntype)
5576 if (lnbitsize > rnbitsize)
5578 lhs = fold_convert_loc (loc, rntype, lhs);
5579 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5580 type = rntype;
5582 else if (lnbitsize < rnbitsize)
5584 rhs = fold_convert_loc (loc, lntype, rhs);
5585 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5586 type = lntype;
5590 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5591 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5593 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5594 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5596 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5599 return 0;
5602 /* Handle the case of comparisons with constants. If there is something in
5603 common between the masks, those bits of the constants must be the same.
5604 If not, the condition is always false. Test for this to avoid generating
5605 incorrect code below. */
5606 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5607 if (! integer_zerop (result)
5608 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5609 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5611 if (wanted_code == NE_EXPR)
5613 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5614 return constant_boolean_node (true, truth_type);
5616 else
5618 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5619 return constant_boolean_node (false, truth_type);
5623 /* Construct the expression we will return. First get the component
5624 reference we will make. Unless the mask is all ones the width of
5625 that field, perform the mask operation. Then compare with the
5626 merged constant. */
5627 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5628 ll_unsignedp || rl_unsignedp);
5630 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5631 if (! all_ones_mask_p (ll_mask, lnbitsize))
5632 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5634 return build2_loc (loc, wanted_code, truth_type, result,
5635 const_binop (BIT_IOR_EXPR, l_const, r_const));
5638 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5639 constant. */
/* OP0 is the MIN_EXPR/MAX_EXPR (possibly wrapped in sign-preserving
   conversions) and OP1 the constant compared against; CODE is the
   comparison code, TYPE the result type, LOC the location for any new
   nodes.  Returns the folded comparison or NULL_TREE if nothing can be
   done.  NOTE(review): lines carry fused web-viewer line numbers and
   brace-only lines were dropped by the extraction; code tokens verbatim. */
5641 static tree
5642 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5643 tree op0, tree op1)
5645 tree arg0 = op0;
5646 enum tree_code op_code;
5647 tree comp_const;
5648 tree minmax_const;
5649 int consts_equal, consts_lt;
5650 tree inner;
/* Look through conversions that don't change the value so we see the
   MIN/MAX node itself. */
5652 STRIP_SIGN_NOPS (arg0);
/* NOTE(review): TREE_OPERAND (arg0, 1) is read before the MIN/MAX check
   below — this presumes callers always pass a binary node; verify at the
   call sites. */
5654 op_code = TREE_CODE (arg0);
5655 minmax_const = TREE_OPERAND (arg0, 1);
5656 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5657 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5658 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5659 inner = TREE_OPERAND (arg0, 0);
5661 /* If something does not permit us to optimize, return the original tree. */
5662 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5663 || TREE_CODE (comp_const) != INTEGER_CST
5664 || TREE_OVERFLOW (comp_const)
5665 || TREE_CODE (minmax_const) != INTEGER_CST
5666 || TREE_OVERFLOW (minmax_const))
5667 return NULL_TREE;
5669 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5670 and GT_EXPR, doing the rest with recursive calls using logical
5671 simplifications. */
5672 switch (code)
5674 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Fold the inverse comparison, then invert the result. */
5676 tree tem
5677 = optimize_minmax_comparison (loc,
5678 invert_tree_comparison (code, false),
5679 type, op0, op1);
5680 if (tem)
5681 return invert_truthvalue_loc (loc, tem);
5682 return NULL_TREE;
/* a >= b is (a == b) || (a > b), both of which we handle directly. */
5685 case GE_EXPR:
5686 return
5687 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5688 optimize_minmax_comparison
5689 (loc, EQ_EXPR, type, arg0, comp_const),
5690 optimize_minmax_comparison
5691 (loc, GT_EXPR, type, arg0, comp_const));
5693 case EQ_EXPR:
5694 if (op_code == MAX_EXPR && consts_equal)
5695 /* MAX (X, 0) == 0 -> X <= 0 */
5696 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5698 else if (op_code == MAX_EXPR && consts_lt)
5699 /* MAX (X, 0) == 5 -> X == 5 */
5700 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5702 else if (op_code == MAX_EXPR)
5703 /* MAX (X, 0) == -1 -> false */
5704 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5706 else if (consts_equal)
5707 /* MIN (X, 0) == 0 -> X >= 0 */
5708 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5710 else if (consts_lt)
5711 /* MIN (X, 0) == 5 -> false */
5712 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5714 else
5715 /* MIN (X, 0) == -1 -> X == -1 */
5716 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5718 case GT_EXPR:
5719 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5720 /* MAX (X, 0) > 0 -> X > 0
5721 MAX (X, 0) > 5 -> X > 5 */
5722 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5724 else if (op_code == MAX_EXPR)
5725 /* MAX (X, 0) > -1 -> true */
5726 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5728 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5729 /* MIN (X, 0) > 0 -> false
5730 MIN (X, 0) > 5 -> false */
5731 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5733 else
5734 /* MIN (X, 0) > -1 -> X > -1 */
5735 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5737 default:
5738 return NULL_TREE;
5742 /* T is an integer expression that is being multiplied, divided, or taken a
5743 modulus (CODE says which and what kind of divide or modulus) by a
5744 constant C. See if we can eliminate that operation by folding it with
5745 other operations already in T. WIDE_TYPE, if non-null, is a type that
5746 should be used for the computation if wider than our type.
5748 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5749 (X * 2) + (Y * 4). We must, however, be assured that either the original
5750 expression would not overflow or that overflow is undefined for the type
5751 in the language in question.
5753 If we return a non-null expression, it is an equivalent form of the
5754 original computation, but need not be in the original type.
5756 We set *STRICT_OVERFLOW_P to true if the return values depends on
5757 signed overflow being undefined. Otherwise we do not change
5758 *STRICT_OVERFLOW_P. */
5760 static tree
5761 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5762 bool *strict_overflow_p)
5764 /* To avoid exponential search depth, refuse to allow recursion past
5765 three levels. Beyond that (1) it's highly unlikely that we'll find
5766 something interesting and (2) we've probably processed it before
5767 when we built the inner expression. */
5769 static int depth;
5770 tree ret;
5772 if (depth > 3)
5773 return NULL;
5775 depth++;
5776 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5777 depth--;
5779 return ret;
5782 static tree
5783 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5784 bool *strict_overflow_p)
5786 tree type = TREE_TYPE (t);
5787 enum tree_code tcode = TREE_CODE (t);
5788 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5789 > GET_MODE_SIZE (TYPE_MODE (type)))
5790 ? wide_type : type);
5791 tree t1, t2;
5792 int same_p = tcode == code;
5793 tree op0 = NULL_TREE, op1 = NULL_TREE;
5794 bool sub_strict_overflow_p;
5796 /* Don't deal with constants of zero here; they confuse the code below. */
5797 if (integer_zerop (c))
5798 return NULL_TREE;
5800 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5801 op0 = TREE_OPERAND (t, 0);
5803 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5804 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5806 /* Note that we need not handle conditional operations here since fold
5807 already handles those cases. So just do arithmetic here. */
5808 switch (tcode)
5810 case INTEGER_CST:
5811 /* For a constant, we can always simplify if we are a multiply
5812 or (for divide and modulus) if it is a multiple of our constant. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5815 return const_binop (code, fold_convert (ctype, t),
5816 fold_convert (ctype, c));
5817 break;
5819 CASE_CONVERT: case NON_LVALUE_EXPR:
5820 /* If op0 is an expression ... */
5821 if ((COMPARISON_CLASS_P (op0)
5822 || UNARY_CLASS_P (op0)
5823 || BINARY_CLASS_P (op0)
5824 || VL_EXP_CLASS_P (op0)
5825 || EXPRESSION_CLASS_P (op0))
5826 /* ... and has wrapping overflow, and its type is smaller
5827 than ctype, then we cannot pass through as widening. */
5828 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5829 && (TYPE_PRECISION (ctype)
5830 > TYPE_PRECISION (TREE_TYPE (op0))))
5831 /* ... or this is a truncation (t is narrower than op0),
5832 then we cannot pass through this narrowing. */
5833 || (TYPE_PRECISION (type)
5834 < TYPE_PRECISION (TREE_TYPE (op0)))
5835 /* ... or signedness changes for division or modulus,
5836 then we cannot pass through this conversion. */
5837 || (code != MULT_EXPR
5838 && (TYPE_UNSIGNED (ctype)
5839 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5840 /* ... or has undefined overflow while the converted to
5841 type has not, we cannot do the operation in the inner type
5842 as that would introduce undefined overflow. */
5843 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5844 && !TYPE_OVERFLOW_UNDEFINED (type))))
5845 break;
5847 /* Pass the constant down and see if we can make a simplification. If
5848 we can, replace this expression with the inner simplification for
5849 possible later conversion to our or some other type. */
5850 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5851 && TREE_CODE (t2) == INTEGER_CST
5852 && !TREE_OVERFLOW (t2)
5853 && (0 != (t1 = extract_muldiv (op0, t2, code,
5854 code == MULT_EXPR
5855 ? ctype : NULL_TREE,
5856 strict_overflow_p))))
5857 return t1;
5858 break;
5860 case ABS_EXPR:
5861 /* If widening the type changes it from signed to unsigned, then we
5862 must avoid building ABS_EXPR itself as unsigned. */
5863 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5865 tree cstype = (*signed_type_for) (ctype);
5866 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5867 != 0)
5869 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5870 return fold_convert (ctype, t1);
5872 break;
5874 /* If the constant is negative, we cannot simplify this. */
5875 if (tree_int_cst_sgn (c) == -1)
5876 break;
5877 /* FALLTHROUGH */
5878 case NEGATE_EXPR:
5879 /* For division and modulus, type can't be unsigned, as e.g.
5880 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5881 For signed types, even with wrapping overflow, this is fine. */
5882 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5883 break;
5884 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5885 != 0)
5886 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5887 break;
5889 case MIN_EXPR: case MAX_EXPR:
5890 /* If widening the type changes the signedness, then we can't perform
5891 this optimization as that changes the result. */
5892 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5893 break;
5895 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5896 sub_strict_overflow_p = false;
5897 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5898 &sub_strict_overflow_p)) != 0
5899 && (t2 = extract_muldiv (op1, c, code, wide_type,
5900 &sub_strict_overflow_p)) != 0)
5902 if (tree_int_cst_sgn (c) < 0)
5903 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5904 if (sub_strict_overflow_p)
5905 *strict_overflow_p = true;
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5907 fold_convert (ctype, t2));
5909 break;
5911 case LSHIFT_EXPR: case RSHIFT_EXPR:
5912 /* If the second operand is constant, this is a multiplication
5913 or floor division, by a power of two, so we can treat it that
5914 way unless the multiplier or divisor overflows. Signed
5915 left-shift overflow is implementation-defined rather than
5916 undefined in C90, so do not convert signed left shift into
5917 multiplication. */
5918 if (TREE_CODE (op1) == INTEGER_CST
5919 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5920 /* const_binop may not detect overflow correctly,
5921 so check for it explicitly here. */
5922 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5923 && TREE_INT_CST_HIGH (op1) == 0
5924 && 0 != (t1 = fold_convert (ctype,
5925 const_binop (LSHIFT_EXPR,
5926 size_one_node,
5927 op1)))
5928 && !TREE_OVERFLOW (t1))
5929 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5930 ? MULT_EXPR : FLOOR_DIV_EXPR,
5931 ctype,
5932 fold_convert (ctype, op0),
5933 t1),
5934 c, code, wide_type, strict_overflow_p);
5935 break;
5937 case PLUS_EXPR: case MINUS_EXPR:
5938 /* See if we can eliminate the operation on both sides. If we can, we
5939 can return a new PLUS or MINUS. If we can't, the only remaining
5940 cases where we can do anything are if the second operand is a
5941 constant. */
5942 sub_strict_overflow_p = false;
5943 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5944 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5945 if (t1 != 0 && t2 != 0
5946 && (code == MULT_EXPR
5947 /* If not multiplication, we can only do this if both operands
5948 are divisible by c. */
5949 || (multiple_of_p (ctype, op0, c)
5950 && multiple_of_p (ctype, op1, c))))
5952 if (sub_strict_overflow_p)
5953 *strict_overflow_p = true;
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5955 fold_convert (ctype, t2));
5958 /* If this was a subtraction, negate OP1 and set it to be an addition.
5959 This simplifies the logic below. */
5960 if (tcode == MINUS_EXPR)
5962 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5963 /* If OP1 was not easily negatable, the constant may be OP0. */
5964 if (TREE_CODE (op0) == INTEGER_CST)
5966 tree tem = op0;
5967 op0 = op1;
5968 op1 = tem;
5969 tem = t1;
5970 t1 = t2;
5971 t2 = tem;
5975 if (TREE_CODE (op1) != INTEGER_CST)
5976 break;
5978 /* If either OP1 or C are negative, this optimization is not safe for
5979 some of the division and remainder types while for others we need
5980 to change the code. */
5981 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5983 if (code == CEIL_DIV_EXPR)
5984 code = FLOOR_DIV_EXPR;
5985 else if (code == FLOOR_DIV_EXPR)
5986 code = CEIL_DIV_EXPR;
5987 else if (code != MULT_EXPR
5988 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5989 break;
5992 /* If it's a multiply or a division/modulus operation of a multiple
5993 of our constant, do the operation and verify it doesn't overflow. */
5994 if (code == MULT_EXPR
5995 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5997 op1 = const_binop (code, fold_convert (ctype, op1),
5998 fold_convert (ctype, c));
5999 /* We allow the constant to overflow with wrapping semantics. */
6000 if (op1 == 0
6001 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6002 break;
6004 else
6005 break;
6007 /* If we have an unsigned type, we cannot widen the operation since it
6008 will change the result if the original computation overflowed. */
6009 if (TYPE_UNSIGNED (ctype) && ctype != type)
6010 break;
6012 /* If we were able to eliminate our operation from the first side,
6013 apply our operation to the second side and reform the PLUS. */
6014 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6015 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6017 /* The last case is if we are a multiply. In that case, we can
6018 apply the distributive law to commute the multiply and addition
6019 if the multiplication of the constants doesn't overflow
6020 and overflow is defined. With undefined overflow
6021 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6022 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6023 return fold_build2 (tcode, ctype,
6024 fold_build2 (code, ctype,
6025 fold_convert (ctype, op0),
6026 fold_convert (ctype, c)),
6027 op1);
6029 break;
6031 case MULT_EXPR:
6032 /* We have a special case here if we are doing something like
6033 (C * 8) % 4 since we know that's zero. */
6034 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6035 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6036 /* If the multiplication can overflow we cannot optimize this. */
6037 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6038 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6039 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6041 *strict_overflow_p = true;
6042 return omit_one_operand (type, integer_zero_node, op0);
6045 /* ... fall through ... */
6047 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6048 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6049 /* If we can extract our operation from the LHS, do so and return a
6050 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6051 do something only if the second operand is a constant. */
6052 if (same_p
6053 && (t1 = extract_muldiv (op0, c, code, wide_type,
6054 strict_overflow_p)) != 0)
6055 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6056 fold_convert (ctype, op1));
6057 else if (tcode == MULT_EXPR && code == MULT_EXPR
6058 && (t1 = extract_muldiv (op1, c, code, wide_type,
6059 strict_overflow_p)) != 0)
6060 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6061 fold_convert (ctype, t1));
6062 else if (TREE_CODE (op1) != INTEGER_CST)
6063 return 0;
6065 /* If these are the same operation types, we can associate them
6066 assuming no overflow. */
6067 if (tcode == code)
6069 double_int mul;
6070 bool overflow_p;
6071 unsigned prec = TYPE_PRECISION (ctype);
6072 bool uns = TYPE_UNSIGNED (ctype);
6073 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6074 double_int dic = tree_to_double_int (c).ext (prec, uns);
6075 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6076 overflow_p = ((!uns && overflow_p)
6077 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6078 if (!double_int_fits_to_tree_p (ctype, mul)
6079 && ((uns && tcode != MULT_EXPR) || !uns))
6080 overflow_p = 1;
6081 if (!overflow_p)
6082 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6083 double_int_to_tree (ctype, mul));
6086 /* If these operations "cancel" each other, we have the main
6087 optimizations of this pass, which occur when either constant is a
6088 multiple of the other, in which case we replace this with either an
6089 operation or CODE or TCODE.
6091 If we have an unsigned type, we cannot do this since it will change
6092 the result if the original computation overflowed. */
6093 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6094 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6095 || (tcode == MULT_EXPR
6096 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6097 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6098 && code != MULT_EXPR)))
6100 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6102 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6103 *strict_overflow_p = true;
6104 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6105 fold_convert (ctype,
6106 const_binop (TRUNC_DIV_EXPR,
6107 op1, c)));
6109 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6111 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6112 *strict_overflow_p = true;
6113 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6114 fold_convert (ctype,
6115 const_binop (TRUNC_DIV_EXPR,
6116 c, op1)));
6119 break;
6121 default:
6122 break;
6125 return 0;
6128 /* Return a node which has the indicated constant VALUE (either 0 or
6129 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6130 and is of the indicated TYPE. */
6132 tree
6133 constant_boolean_node (bool value, tree type)
6135 if (type == integer_type_node)
6136 return value ? integer_one_node : integer_zero_node;
6137 else if (type == boolean_type_node)
6138 return value ? boolean_true_node : boolean_false_node;
6139 else if (TREE_CODE (type) == VECTOR_TYPE)
6140 return build_vector_from_val (type,
6141 build_int_cst (TREE_TYPE (type),
6142 value ? -1 : 0));
6143 else
6144 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6148 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6149 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6150 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6151 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6152 COND is the first argument to CODE; otherwise (as in the example
6153 given here), it is the second argument. TYPE is the type of the
6154 original expression. Return NULL_TREE if no simplification is
6155 possible. */
6157 static tree
6158 fold_binary_op_with_conditional_arg (location_t loc,
6159 enum tree_code code,
6160 tree type, tree op0, tree op1,
6161 tree cond, tree arg, int cond_first_p)
6163 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6164 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6165 tree test, true_value, false_value;
 /* LHS/RHS hold the folded arms of the result; NULL_TREE means
    "not yet computed" (they are built further below).  */
6166 tree lhs = NULL_TREE;
6167 tree rhs = NULL_TREE;
6168 enum tree_code cond_code = COND_EXPR;
6170 if (TREE_CODE (cond) == COND_EXPR
6171 || TREE_CODE (cond) == VEC_COND_EXPR)
6173 test = TREE_OPERAND (cond, 0);
6174 true_value = TREE_OPERAND (cond, 1);
6175 false_value = TREE_OPERAND (cond, 2);
6176 /* If this operand throws an expression, then it does not make
6177 sense to try to perform a logical or arithmetic operation
6178 involving it.  A void-typed arm is kept as-is in LHS/RHS.  */
6179 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6180 lhs = true_value;
6181 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6182 rhs = false_value;
6184 else
 /* COND is a bare comparison like (x < y): treat it as
    (cond ? true : false) with boolean constants for the arms.  */
6186 tree testtype = TREE_TYPE (cond);
6187 test = cond;
6188 true_value = constant_boolean_node (true, testtype);
6189 false_value = constant_boolean_node (false, testtype);
 /* A vector-typed test must be rebuilt as a VEC_COND_EXPR.  */
6192 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6193 cond_code = VEC_COND_EXPR;
6195 /* This transformation is only worthwhile if we don't have to wrap ARG
6196 in a SAVE_EXPR and the operation can be simplified without recursing
6197 on at least one of the branches once its pushed inside the COND_EXPR. */
6198 if (!TREE_CONSTANT (arg)
6199 && (TREE_SIDE_EFFECTS (arg)
6200 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6201 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6202 return NULL_TREE;
6204 arg = fold_convert_loc (loc, arg_type, arg)
 /* Build each arm that was not preset above, respecting the
    operand order requested by COND_FIRST_P.  */
6205 if (lhs == 0)
6207 true_value = fold_convert_loc (loc, cond_type, true_value);
6208 if (cond_first_p)
6209 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6210 else
6211 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6213 if (rhs == 0)
6215 false_value = fold_convert_loc (loc, cond_type, false_value);
6216 if (cond_first_p)
6217 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6218 else
6219 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6222 /* Check that we have simplified at least one of the branches. */
6223 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6224 return NULL_TREE;
6226 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6230 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6232 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6233 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6234 ADDEND is the same as X.
6236 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6237 and finite. The problematic cases are when X is zero, and its mode
6238 has signed zeros. In the case of rounding towards -infinity,
6239 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6240 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6242 bool
6243 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6245 if (!real_zerop (addend))
6246 return false;
6248 /* Don't allow the fold with -fsignaling-nans. */
6249 if (HONOR_SNANS (TYPE_MODE (type)))
6250 return false;
6252 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6253 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6254 return true;
6256 /* In a vector or complex, we would need to check the sign of all zeros. */
6257 if (TREE_CODE (addend) != REAL_CST)
6258 return false;
6260 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6261 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6262 negate = !negate;
6264 /* The mode has signed zeros, and we have to honor their sign.
6265 In this situation, there is only one case we can return true for.
6266 X - 0 is the same as X unless rounding towards -infinity is
6267 supported. */
6268 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6271 /* Subroutine of fold() that checks comparisons of built-in math
6272 functions against real constants.  Currently only the sqrt family
6273 (BUILTIN_SQRT_P) is handled; other FCODEs return NULL_TREE.
6274 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6275 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6276 is the type of the result and ARG0 and ARG1 are the operands of the
6277 comparison. ARG1 must be a TREE_REAL_CST.
6279 The function returns the constant folded tree if a simplification
6280 can be made, and NULL_TREE otherwise. */
6282 static tree
6283 fold_mathfn_compare (location_t loc,
6284 enum built_in_function fcode, enum tree_code code,
6285 tree type, tree arg0, tree arg1)
6287 REAL_VALUE_TYPE c;
6289 if (BUILTIN_SQRT_P (fcode))
 /* ARG0 is a call sqrt(arg); compare its result against constant C.  */
6291 tree arg = CALL_EXPR_ARG (arg0, 0);
6292 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6294 c = TREE_REAL_CST (arg1);
6295 if (REAL_VALUE_NEGATIVE (c))
6297 /* sqrt(x) < y is always false, if y is negative. */
6298 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6299 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6301 /* sqrt(x) > y is always true, if y is negative and we
6302 don't care about NaNs, i.e. negative values of x. */
6303 if (code == NE_EXPR || !HONOR_NANS (mode))
6304 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6306 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6307 return fold_build2_loc (loc, GE_EXPR, type, arg,
6308 build_real (TREE_TYPE (arg), dconst0));
6310 else if (code == GT_EXPR || code == GE_EXPR)
6312 REAL_VALUE_TYPE c2;
 /* c2 = c*c rounded into ARG0's mode; for very large C this
    overflows to +Inf, handled specially below.  */
6314 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6315 real_convert (&c2, mode, &c2);
6317 if (REAL_VALUE_ISINF (c2))
6319 /* sqrt(x) > y is x == +Inf, when y is very large. */
6320 if (HONOR_INFINITIES (mode))
6321 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6322 build_real (TREE_TYPE (arg), c2));
6324 /* sqrt(x) > y is always false, when y is very large
6325 and we don't care about infinities. */
6326 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6329 /* sqrt(x) > c is the same as x > c*c. */
6330 return fold_build2_loc (loc, code, type, arg,
6331 build_real (TREE_TYPE (arg), c2));
6333 else if (code == LT_EXPR || code == LE_EXPR)
6335 REAL_VALUE_TYPE c2;
6337 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6338 real_convert (&c2, mode, &c2);
6340 if (REAL_VALUE_ISINF (c2))
6342 /* sqrt(x) < y is always true, when y is a very large
6343 value and we don't care about NaNs or Infinities. */
6344 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6345 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6347 /* sqrt(x) < y is x != +Inf when y is very large and we
6348 don't care about NaNs. */
6349 if (! HONOR_NANS (mode))
6350 return fold_build2_loc (loc, NE_EXPR, type, arg,
6351 build_real (TREE_TYPE (arg), c2));
6353 /* sqrt(x) < y is x >= 0 when y is very large and we
6354 don't care about Infinities. */
6355 if (! HONOR_INFINITIES (mode))
6356 return fold_build2_loc (loc, GE_EXPR, type, arg,
6357 build_real (TREE_TYPE (arg), dconst0));
6359 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
   ARG is evaluated twice below, hence the save_expr.  */
6360 arg = save_expr (arg);
6361 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6362 fold_build2_loc (loc, GE_EXPR, type, arg,
6363 build_real (TREE_TYPE (arg),
6364 dconst0)),
6365 fold_build2_loc (loc, NE_EXPR, type, arg,
6366 build_real (TREE_TYPE (arg),
6367 c2)));
6370 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6371 if (! HONOR_NANS (mode))
6372 return fold_build2_loc (loc, code, type, arg,
6373 build_real (TREE_TYPE (arg), c2));
6375 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6376 arg = save_expr (arg);
6377 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6378 fold_build2_loc (loc, GE_EXPR, type, arg,
6379 build_real (TREE_TYPE (arg),
6380 dconst0)),
6381 fold_build2_loc (loc, code, type, arg,
6382 build_real (TREE_TYPE (arg),
6383 c2)));
6387 return NULL_TREE;
6390 /* Subroutine of fold() that optimizes comparisons against Infinities,
6391 either +Inf or -Inf.
6393 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6394 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6395 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6397 The function returns the constant folded tree if a simplification
6398 can be made, and NULL_TREE otherwise. */
6400 static tree
6401 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6402 tree arg0, tree arg1)
6404 enum machine_mode mode;
6405 REAL_VALUE_TYPE max;
6406 tree temp;
6407 bool neg;
6409 mode = TYPE_MODE (TREE_TYPE (arg0));
6411 /* For negative infinity swap the sense of the comparison, so the
   switch below only has to reason about +Inf; NEG also selects the
   negative extremum from real_maxval.  */
6412 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6413 if (neg)
6414 code = swap_tree_comparison (code);
6416 switch (code)
6418 case GT_EXPR:
6419 /* x > +Inf is always false, if we ignore sNaNs. */
6420 if (HONOR_SNANS (mode))
6421 return NULL_TREE;
6422 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6424 case LE_EXPR:
6425 /* x <= +Inf is always true, if we don't care about NaNs. */
6426 if (! HONOR_NANS (mode))
6427 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6429 /* x <= +Inf is the same as x == x, i.e. isfinite(x).
   ARG0 appears twice, hence the save_expr.  */
6430 arg0 = save_expr (arg0);
6431 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6433 case EQ_EXPR:
6434 case GE_EXPR:
6435 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6436 real_maxval (&max, neg, mode);
6437 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6438 arg0, build_real (TREE_TYPE (arg0), max));
6440 case LT_EXPR:
6441 /* x < +Inf is always equal to x <= DBL_MAX. */
6442 real_maxval (&max, neg, mode);
6443 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6444 arg0, build_real (TREE_TYPE (arg0), max));
6446 case NE_EXPR:
6447 /* x != +Inf is always equal to !(x > DBL_MAX); without NaNs the
   negation simplifies directly to x <= DBL_MAX.  */
6448 real_maxval (&max, neg, mode);
6449 if (! HONOR_NANS (mode))
6450 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6451 arg0, build_real (TREE_TYPE (arg0), max));
6453 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6454 arg0, build_real (TREE_TYPE (arg0), max));
6455 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6457 default:
6458 break;
6461 return NULL_TREE;
6464 /* Subroutine of fold() that optimizes comparisons of a division by
6465 a nonzero integer constant against an integer constant, i.e.
6466 X/C1 op C2.
6468 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6469 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6470 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6472 The function returns the constant folded tree if a simplification
6473 can be made, and NULL_TREE otherwise. */
6475 static tree
6476 fold_div_compare (location_t loc,
6477 enum tree_code code, tree type, tree arg0, tree arg1)
 /* [LO, HI] is the inclusive range of X values for which
    X / C1 == C2; a TREE_OVERFLOW bound means that end of the range
    is not representable, which each case of the final switch turns
    into a one-sided or constant result.  */
6479 tree prod, tmp, hi, lo;
6480 tree arg00 = TREE_OPERAND (arg0, 0);
6481 tree arg01 = TREE_OPERAND (arg0, 1);
6482 double_int val;
6483 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
 /* When true, an overflowed bound came from the negative direction,
    flipping which constant the LT/LE/GT/GE cases degenerate to.  */
6484 bool neg_overflow;
6485 bool overflow;
6487 /* We have to do this the hard way to detect unsigned overflow.
6488 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6489 val = TREE_INT_CST (arg01)
6490 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6491 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6492 neg_overflow = false;
6494 if (unsigned_p)
 /* Unsigned: X/C1 == C2 iff X in [C1*C2, C1*C2 + (C1-1)].  */
6496 tmp = int_const_binop (MINUS_EXPR, arg01,
6497 build_int_cst (TREE_TYPE (arg01), 1));
6498 lo = prod;
6500 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6501 val = TREE_INT_CST (prod)
6502 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6503 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6504 -1, overflow | TREE_OVERFLOW (prod));
6506 else if (tree_int_cst_sgn (arg01) >= 0)
 /* Signed, positive divisor: the range direction depends on the
    sign of the quotient C2 (truncating division).  */
6508 tmp = int_const_binop (MINUS_EXPR, arg01,
6509 build_int_cst (TREE_TYPE (arg01), 1));
6510 switch (tree_int_cst_sgn (arg1))
6512 case -1:
6513 neg_overflow = true;
6514 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6515 hi = prod;
6516 break;
6518 case 0:
6519 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6520 hi = tmp;
6521 break;
6523 case 1:
6524 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6525 lo = prod;
6526 break;
6528 default:
6529 gcc_unreachable ();
6532 else
6534 /* A negative divisor reverses the relational operators. */
6535 code = swap_tree_comparison (code);
6537 tmp = int_const_binop (PLUS_EXPR, arg01,
6538 build_int_cst (TREE_TYPE (arg01), 1));
6539 switch (tree_int_cst_sgn (arg1))
6541 case -1:
6542 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6543 lo = prod;
6544 break;
6546 case 0:
6547 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6548 lo = tmp;
6549 break;
6551 case 1:
6552 neg_overflow = true;
6553 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6554 hi = prod;
6555 break;
6557 default:
6558 gcc_unreachable ();
 /* Rewrite X/C1 op C2 as a range check on X, degrading gracefully
    when one or both bounds overflowed.  */
6562 switch (code)
6564 case EQ_EXPR:
6565 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6566 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6567 if (TREE_OVERFLOW (hi))
6568 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6569 if (TREE_OVERFLOW (lo))
6570 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6571 return build_range_check (loc, type, arg00, 1, lo, hi);
6573 case NE_EXPR:
6574 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6575 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6576 if (TREE_OVERFLOW (hi))
6577 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6578 if (TREE_OVERFLOW (lo))
6579 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6580 return build_range_check (loc, type, arg00, 0, lo, hi);
6582 case LT_EXPR:
6583 if (TREE_OVERFLOW (lo))
6585 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6586 return omit_one_operand_loc (loc, type, tmp, arg00);
6588 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6590 case LE_EXPR:
6591 if (TREE_OVERFLOW (hi))
6593 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6594 return omit_one_operand_loc (loc, type, tmp, arg00);
6596 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6598 case GT_EXPR:
6599 if (TREE_OVERFLOW (hi))
6601 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6602 return omit_one_operand_loc (loc, type, tmp, arg00);
6604 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6606 case GE_EXPR:
6607 if (TREE_OVERFLOW (lo))
6609 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6610 return omit_one_operand_loc (loc, type, tmp, arg00);
6612 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6614 default:
6615 break;
6618 return NULL_TREE;
6622 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6623 equality/inequality test, then return a simplified form of the test
6624 using a sign testing. Otherwise return NULL. TYPE is the desired
6625 result type. */
6627 static tree
6628 fold_single_bit_test_into_sign_test (location_t loc,
6629 enum tree_code code, tree arg0, tree arg1,
6630 tree result_type)
6632 /* If this is testing a single bit, we can optimize the test.
   The pattern matched is (A & power-of-2) ==/!= 0.  */
6633 if ((code == NE_EXPR || code == EQ_EXPR)
6634 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6635 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6637 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6638 this into A < 0. Similarly for (A & C) == 0 into A >= 0.
   sign_bit_p returns the operand with conversions stripped if C is
   its sign bit, or NULL_TREE otherwise.  */
6639 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6641 if (arg00 != NULL_TREE
6642 /* This is only a win if casting to a signed type is cheap,
6643 i.e. when arg00's type is not a partial mode. */
6644 && TYPE_PRECISION (TREE_TYPE (arg00))
6645 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00)))
6647 tree stype = signed_type_for (TREE_TYPE (arg00));
6648 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6649 result_type,
6650 fold_convert_loc (loc, stype, arg00),
6651 build_int_cst (stype, 0));
6655 return NULL_TREE;
6658 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6659 equality/inequality test, then return a simplified form of
6660 the test using shifts and logical operations. Otherwise return
6661 NULL. TYPE is the desired result type. */
6663 tree
6664 fold_single_bit_test (location_t loc, enum tree_code code,
6665 tree arg0, tree arg1, tree result_type)
6667 /* If this is testing a single bit, we can optimize the test.
   Pattern: (A & power-of-2) ==/!= 0.  */
6668 if ((code == NE_EXPR || code == EQ_EXPR)
6669 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6670 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6672 tree inner = TREE_OPERAND (arg0, 0);
6673 tree type = TREE_TYPE (arg0);
6674 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6675 enum machine_mode operand_mode = TYPE_MODE (type);
6676 int ops_unsigned;
6677 tree signed_type, unsigned_type, intermediate_type;
6678 tree tem, one;
6680 /* First, see if we can fold the single bit test into a sign-bit
6681 test. */
6682 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6683 result_type);
6684 if (tem)
6685 return tem;
6687 /* Otherwise we have (A & C) != 0 where C is a single bit,
6688 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6689 Similarly for (A & C) == 0. */
6691 /* If INNER is a right shift of a constant and it plus BITNUM does
6692 not overflow, adjust BITNUM and INNER.  This lets
   ((A >> N) & C) != 0 test bit N + log2(C) of A directly.  */
6693 if (TREE_CODE (inner) == RSHIFT_EXPR
6694 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6695 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6696 && bitnum < TYPE_PRECISION (type)
6697 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6698 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6700 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6701 inner = TREE_OPERAND (inner, 0);
6704 /* If we are going to be able to omit the AND below, we must do our
6705 operations as unsigned. If we must use the AND, we have a choice.
6706 Normally unsigned is faster, but for some machines signed is. */
6707 #ifdef LOAD_EXTEND_OP
6708 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6709 && !flag_syntax_only) ? 0 : 1;
6710 #else
6711 ops_unsigned = 1;
6712 #endif
6714 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6715 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6716 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6717 inner = fold_convert_loc (loc, intermediate_type, inner);
6719 if (bitnum != 0)
6720 inner = build2 (RSHIFT_EXPR, intermediate_type,
6721 inner, size_int (bitnum));
6723 one = build_int_cst (intermediate_type, 1);
 /* For the == 0 form, invert the extracted bit with XOR 1.  */
6725 if (code == EQ_EXPR)
6726 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6728 /* Put the AND last so it can combine with more things. */
6729 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6731 /* Make sure to return the proper type. */
6732 inner = fold_convert_loc (loc, result_type, inner);
6734 return inner;
6736 return NULL_TREE;
6739 /* Check whether we are allowed to reorder operands arg0 and arg1,
6740 such that the evaluation of arg1 occurs before arg0. */
6742 static bool
6743 reorder_operands_p (const_tree arg0, const_tree arg1)
6745 if (! flag_evaluation_order)
6746 return true;
6747 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6748 return true;
6749 return ! TREE_SIDE_EFFECTS (arg0)
6750 && ! TREE_SIDE_EFFECTS (arg1);
6753 /* Test whether it is preferable two swap two operands, ARG0 and
6754 ARG1, for example because ARG0 is an integer constant and ARG1
6755 isn't. If REORDER is true, only recommend swapping if we can
6756 evaluate the operands in reverse order. */
6758 bool
6759 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6761 STRIP_SIGN_NOPS (arg0);
6762 STRIP_SIGN_NOPS (arg1);
6764 if (TREE_CODE (arg1) == INTEGER_CST)
6765 return 0;
6766 if (TREE_CODE (arg0) == INTEGER_CST)
6767 return 1;
6769 if (TREE_CODE (arg1) == REAL_CST)
6770 return 0;
6771 if (TREE_CODE (arg0) == REAL_CST)
6772 return 1;
6774 if (TREE_CODE (arg1) == FIXED_CST)
6775 return 0;
6776 if (TREE_CODE (arg0) == FIXED_CST)
6777 return 1;
6779 if (TREE_CODE (arg1) == COMPLEX_CST)
6780 return 0;
6781 if (TREE_CODE (arg0) == COMPLEX_CST)
6782 return 1;
6784 if (TREE_CONSTANT (arg1))
6785 return 0;
6786 if (TREE_CONSTANT (arg0))
6787 return 1;
6789 if (optimize_function_for_size_p (cfun))
6790 return 0;
6792 if (reorder && flag_evaluation_order
6793 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6794 return 0;
6796 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6797 for commutative and comparison operators. Ensuring a canonical
6798 form allows the optimizers to find additional redundancies without
6799 having to explicitly check for both orderings. */
6800 if (TREE_CODE (arg0) == SSA_NAME
6801 && TREE_CODE (arg1) == SSA_NAME
6802 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6803 return 1;
6805 /* Put SSA_NAMEs last. */
6806 if (TREE_CODE (arg1) == SSA_NAME)
6807 return 0;
6808 if (TREE_CODE (arg0) == SSA_NAME)
6809 return 1;
6811 /* Put variables last. */
6812 if (DECL_P (arg1))
6813 return 0;
6814 if (DECL_P (arg0))
6815 return 1;
6817 return 0;
6820 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6821 ARG0 is extended to a wider type. */
6823 static tree
6824 fold_widened_comparison (location_t loc, enum tree_code code,
6825 tree type, tree arg0, tree arg1)
6827 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6828 tree arg1_unw;
6829 tree shorter_type, outer_type;
6830 tree min, max;
6831 bool above, below;
 /* If ARG0 is not actually a widening, there is nothing to do.  */
6833 if (arg0_unw == arg0)
6834 return NULL_TREE;
6835 shorter_type = TREE_TYPE (arg0_unw);
6837 #ifdef HAVE_canonicalize_funcptr_for_compare
6838 /* Disable this optimization if we're casting a function pointer
6839 type on targets that require function pointer canonicalization. */
6840 if (HAVE_canonicalize_funcptr_for_compare
6841 && TREE_CODE (shorter_type) == POINTER_TYPE
6842 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6843 return NULL_TREE;
6844 #endif
6846 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6847 return NULL_TREE;
6849 arg1_unw = get_unwidened (arg1, NULL_TREE);
6851 /* If possible, express the comparison in the shorter mode.  Safe
   for EQ/NE always; for ordered comparisons only when signedness
   matches, and only when ARG1 fits in the shorter type.  */
6852 if ((code == EQ_EXPR || code == NE_EXPR
6853 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6854 && (TREE_TYPE (arg1_unw) == shorter_type
6855 || ((TYPE_PRECISION (shorter_type)
6856 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6857 && (TYPE_UNSIGNED (shorter_type)
6858 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6859 || (TREE_CODE (arg1_unw) == INTEGER_CST
6860 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6861 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6862 && int_fits_type_p (arg1_unw, shorter_type))))
6863 return fold_build2_loc (loc, code, type, arg0_unw,
6864 fold_convert_loc (loc, shorter_type, arg1_unw));
6866 if (TREE_CODE (arg1_unw) != INTEGER_CST
6867 || TREE_CODE (shorter_type) != INTEGER_TYPE
6868 || !int_fits_type_p (arg1_unw, shorter_type))
6869 return NULL_TREE;
6871 /* If we are comparing with the integer that does not fit into the range
6872 of the shorter type, the result is known.  ABOVE/BELOW record whether
   ARG1 lies strictly above, resp. below, SHORTER_TYPE's value range.  */
6873 outer_type = TREE_TYPE (arg1_unw);
6874 min = lower_bound_in_type (outer_type, shorter_type);
6875 max = upper_bound_in_type (outer_type, shorter_type);
6877 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6878 max, arg1_unw));
6879 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6880 arg1_unw, min));
6882 switch (code)
6884 case EQ_EXPR:
6885 if (above || below)
6886 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6887 break;
6889 case NE_EXPR:
6890 if (above || below)
6891 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6892 break;
6894 case LT_EXPR:
6895 case LE_EXPR:
6896 if (above)
6897 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6898 else if (below)
6899 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 /* NOTE(review): no break visible here in this revision; when neither
    ABOVE nor BELOW holds, control reaches the cases below, whose tests
    are then also false -- confirm the fall-through is intended.  */
6901 case GT_EXPR:
6902 case GE_EXPR:
6903 if (above)
6904 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6905 else if (below)
6906 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6908 default:
6909 break;
6912 return NULL_TREE;
6915 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6916 ARG0 just the signedness is changed. */
6918 static tree
6919 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6920 tree arg0, tree arg1)
6922 tree arg0_inner;
6923 tree inner_type, outer_type;
6925 if (!CONVERT_EXPR_P (arg0))
6926 return NULL_TREE;
6928 outer_type = TREE_TYPE (arg0);
6929 arg0_inner = TREE_OPERAND (arg0, 0);
6930 inner_type = TREE_TYPE (arg0_inner);
6932 #ifdef HAVE_canonicalize_funcptr_for_compare
6933 /* Disable this optimization if we're casting a function pointer
6934 type on targets that require function pointer canonicalization. */
6935 if (HAVE_canonicalize_funcptr_for_compare
6936 && TREE_CODE (inner_type) == POINTER_TYPE
6937 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6938 return NULL_TREE;
6939 #endif
 /* Only pure sign changes qualify: the precision must be identical,
   otherwise the conversion is a widening/narrowing instead.  */
6941 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6942 return NULL_TREE;
 /* ARG1 must be a constant or itself a conversion from INNER_TYPE,
   so it can be re-expressed in INNER_TYPE below.  */
6944 if (TREE_CODE (arg1) != INTEGER_CST
6945 && !(CONVERT_EXPR_P (arg1)
6946 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)
6947 return NULL_TREE;
 /* An actual signedness change only preserves equality comparisons;
   ordered comparisons would change meaning.  */
6949 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6950 && code != NE_EXPR
6951 && code != EQ_EXPR)
6952 return NULL_TREE;
6954 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6955 return NULL_TREE;
 /* Re-express ARG1 in INNER_TYPE, preserving any overflow flag.  */
6957 if (TREE_CODE (arg1) == INTEGER_CST)
6958 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6959 0, TREE_OVERFLOW (arg1));
6960 else
6961 arg1 = fold_convert_loc (loc, inner_type, arg1);
6963 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6966 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6967 step of the array. Reconstructs s and delta in the case of s *
6968 delta being an integer constant (and thus already folded). ADDR is
6969 the address. MULT is the multiplicative expression. If the
6970 function succeeds, the new address expression is returned.
6971 Otherwise NULL_TREE is returned. LOC is the location of the
6972 resulting expression. */
6974 static tree
6975 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6977 tree s, delta, step;
6978 tree ref = TREE_OPERAND (addr, 0), pref;
6979 tree ret, pos;
6980 tree itype;
6981 bool mdim = false;
6983 /* Strip the nops that might be added when converting op1 to sizetype. */
6984 STRIP_NOPS (op1);
6986 /* Canonicalize op1 into a possibly non-constant delta
6987 and an INTEGER_CST s. */
6988 if (TREE_CODE (op1) == MULT_EXPR)
6990 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6992 STRIP_NOPS (arg0);
6993 STRIP_NOPS (arg1);
6995 if (TREE_CODE (arg0) == INTEGER_CST)
6997 s = arg0;
6998 delta = arg1;
7000 else if (TREE_CODE (arg1) == INTEGER_CST)
7002 s = arg1;
7003 delta = arg0;
7005 else
7006 return NULL_TREE;
7008 else if (TREE_CODE (op1) == INTEGER_CST)
7010 delta = op1;
7011 s = NULL_TREE;
7013 else
7015 /* Simulate we are delta * 1. */
7016 delta = op1;
7017 s = integer_one_node;
7020 /* Handle &x.array the same as we would handle &x.array[0]. */
7021 if (TREE_CODE (ref) == COMPONENT_REF
7022 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
7024 tree domain;
7026 /* Remember if this was a multi-dimensional array. */
7027 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7028 mdim = true;
7030 domain = TYPE_DOMAIN (TREE_TYPE (ref));
7031 if (! domain)
7032 goto cont;
7033 itype = TREE_TYPE (domain);
7035 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7036 if (TREE_CODE (step) != INTEGER_CST)
7037 goto cont;
7039 if (s)
7041 if (! tree_int_cst_equal (step, s))
7042 goto cont;
7044 else
7046 /* Try if delta is a multiple of step. */
7047 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7048 if (! tmp)
7049 goto cont;
7050 delta = tmp;
7053 /* Only fold here if we can verify we do not overflow one
7054 dimension of a multi-dimensional array. */
7055 if (mdim)
7057 tree tmp;
7059 if (!TYPE_MIN_VALUE (domain)
7060 || !TYPE_MAX_VALUE (domain)
7061 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7062 goto cont;
7064 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7065 fold_convert_loc (loc, itype,
7066 TYPE_MIN_VALUE (domain)),
7067 fold_convert_loc (loc, itype, delta));
7068 if (TREE_CODE (tmp) != INTEGER_CST
7069 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7070 goto cont;
7073 /* We found a suitable component reference. */
7075 pref = TREE_OPERAND (addr, 0);
7076 ret = copy_node (pref);
7077 SET_EXPR_LOCATION (ret, loc);
7079 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7080 fold_build2_loc
7081 (loc, PLUS_EXPR, itype,
7082 fold_convert_loc (loc, itype,
7083 TYPE_MIN_VALUE
7084 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7085 fold_convert_loc (loc, itype, delta)),
7086 NULL_TREE, NULL_TREE);
7087 return build_fold_addr_expr_loc (loc, ret);
7090 cont:
7092 for (;; ref = TREE_OPERAND (ref, 0))
7094 if (TREE_CODE (ref) == ARRAY_REF)
7096 tree domain;
7098 /* Remember if this was a multi-dimensional array. */
7099 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7100 mdim = true;
7102 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7103 if (! domain)
7104 continue;
7105 itype = TREE_TYPE (domain);
7107 step = array_ref_element_size (ref);
7108 if (TREE_CODE (step) != INTEGER_CST)
7109 continue;
7111 if (s)
7113 if (! tree_int_cst_equal (step, s))
7114 continue;
7116 else
7118 /* Try if delta is a multiple of step. */
7119 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7120 if (! tmp)
7121 continue;
7122 delta = tmp;
7125 /* Only fold here if we can verify we do not overflow one
7126 dimension of a multi-dimensional array. */
7127 if (mdim)
7129 tree tmp;
7131 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7132 || !TYPE_MAX_VALUE (domain)
7133 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7134 continue;
7136 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7137 fold_convert_loc (loc, itype,
7138 TREE_OPERAND (ref, 1)),
7139 fold_convert_loc (loc, itype, delta));
7140 if (!tmp
7141 || TREE_CODE (tmp) != INTEGER_CST
7142 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7143 continue;
7146 break;
7148 else
7149 mdim = false;
7151 if (!handled_component_p (ref))
7152 return NULL_TREE;
7155 /* We found the suitable array reference. So copy everything up to it,
7156 and replace the index. */
7158 pref = TREE_OPERAND (addr, 0);
7159 ret = copy_node (pref);
7160 SET_EXPR_LOCATION (ret, loc);
7161 pos = ret;
7163 while (pref != ref)
7165 pref = TREE_OPERAND (pref, 0);
7166 TREE_OPERAND (pos, 0) = copy_node (pref);
7167 pos = TREE_OPERAND (pos, 0);
7170 TREE_OPERAND (pos, 1)
7171 = fold_build2_loc (loc, PLUS_EXPR, itype,
7172 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7173 fold_convert_loc (loc, itype, delta));
7174 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7178 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7179 means A >= Y && A != MAX, but in this case we know that
7180 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7182 static tree
7183 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7185 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7187 if (TREE_CODE (bound) == LT_EXPR)
7188 a = TREE_OPERAND (bound, 0);
7189 else if (TREE_CODE (bound) == GT_EXPR)
7190 a = TREE_OPERAND (bound, 1);
7191 else
7192 return NULL_TREE;
7194 typea = TREE_TYPE (a);
7195 if (!INTEGRAL_TYPE_P (typea)
7196 && !POINTER_TYPE_P (typea))
7197 return NULL_TREE;
7199 if (TREE_CODE (ineq) == LT_EXPR)
7201 a1 = TREE_OPERAND (ineq, 1);
7202 y = TREE_OPERAND (ineq, 0);
7204 else if (TREE_CODE (ineq) == GT_EXPR)
7206 a1 = TREE_OPERAND (ineq, 0);
7207 y = TREE_OPERAND (ineq, 1);
7209 else
7210 return NULL_TREE;
7212 if (TREE_TYPE (a1) != typea)
7213 return NULL_TREE;
7215 if (POINTER_TYPE_P (typea))
7217 /* Convert the pointer types into integer before taking the difference. */
7218 tree ta = fold_convert_loc (loc, ssizetype, a);
7219 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7220 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7222 else
7223 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7225 if (!diff || !integer_onep (diff))
7226 return NULL_TREE;
7228 return fold_build2_loc (loc, GE_EXPR, type, a, y);
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.
   CODE is PLUS_EXPR or MINUS_EXPR; ARG0 and ARG1 are the two operands,
   at least one of which is expected to be a MULT_EXPR.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Decompose ARG0 into arg00 * arg01; a non-MULT operand is treated
     as itself times one.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  /* Likewise decompose ARG1 into arg10 * arg11.  */
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  /* Note CODE itself is flipped here so the rebuild below
	     uses the un-canonicalized subtraction.  */
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for a factor common to both products; SAME is the shared
     factor, ALT0/ALT1 the remaining multiplicands.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  /* Factor the larger product as (arg00 * (int01/int11)) and
	     share the smaller (power-of-two) constant.  */
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the earlier operand swap so ALT0/ALT1 line up with
	     the original ARG0/ARG1 for a MINUS_EXPR rebuild.  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  /* Rebuild as (ALT0 +- ALT1) * SAME when a common factor was found.  */
  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* Fail if the constant does not fit in the caller's buffer.  */
  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Pull the byte out of the low or high half of the double-int
	 representation of the constant.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: place the byte according to the target's
	     word order, then its byte order within the word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	/* Sub-word value: only byte order matters.  */
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
7398 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7399 specified by EXPR into the buffer PTR of length LEN bytes.
7400 Return the number of bytes placed in the buffer, or zero
7401 upon failure. */
7403 static int
7404 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7406 tree type = TREE_TYPE (expr);
7407 enum machine_mode mode = TYPE_MODE (type);
7408 int total_bytes = GET_MODE_SIZE (mode);
7409 FIXED_VALUE_TYPE value;
7410 tree i_value, i_type;
7412 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7413 return 0;
7415 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7417 if (NULL_TREE == i_type
7418 || TYPE_PRECISION (i_type) != total_bytes)
7419 return 0;
7421 value = TREE_FIXED_CST (expr);
7422 i_value = double_int_to_tree (i_type, value.data);
7424 return native_encode_int (i_value, ptr, len);
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  /* Number of words making up each 32-bit group.  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  /* Obtain the target's in-memory image as 32-bit chunks in TMP.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Position of this byte within its 32-bit group.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  /* Words smaller than the 32-bit groups: respect the target's
	     word order, then its byte order within the word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      /* Place the byte within the current 32-bit group of PTR.  */
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
7476 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7477 specified by EXPR into the buffer PTR of length LEN bytes.
7478 Return the number of bytes placed in the buffer, or zero
7479 upon failure. */
7481 static int
7482 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7484 int rsize, isize;
7485 tree part;
7487 part = TREE_REALPART (expr);
7488 rsize = native_encode_expr (part, ptr, len);
7489 if (rsize == 0)
7490 return 0;
7491 part = TREE_IMAGPART (expr);
7492 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7493 if (isize != rsize)
7494 return 0;
7495 return rsize + isize;
7499 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7500 specified by EXPR into the buffer PTR of length LEN bytes.
7501 Return the number of bytes placed in the buffer, or zero
7502 upon failure. */
7504 static int
7505 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7507 unsigned i, count;
7508 int size, offset;
7509 tree itype, elem;
7511 offset = 0;
7512 count = VECTOR_CST_NELTS (expr);
7513 itype = TREE_TYPE (TREE_TYPE (expr));
7514 size = GET_MODE_SIZE (TYPE_MODE (itype));
7515 for (i = 0; i < count; i++)
7517 elem = VECTOR_CST_ELT (expr, i);
7518 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7519 return 0;
7520 offset += size;
7522 return offset;
7526 /* Subroutine of native_encode_expr. Encode the STRING_CST
7527 specified by EXPR into the buffer PTR of length LEN bytes.
7528 Return the number of bytes placed in the buffer, or zero
7529 upon failure. */
7531 static int
7532 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7534 tree type = TREE_TYPE (expr);
7535 HOST_WIDE_INT total_bytes;
7537 if (TREE_CODE (type) != ARRAY_TYPE
7538 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7539 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7540 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7541 return 0;
7542 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7543 if (total_bytes > len)
7544 return 0;
7545 if (TREE_STRING_LENGTH (expr) < total_bytes)
7547 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7548 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7549 total_bytes - TREE_STRING_LENGTH (expr));
7551 else
7552 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7553 return total_bytes;
7557 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7558 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7559 buffer PTR of length LEN bytes. Return the number of bytes
7560 placed in the buffer, or zero upon failure. */
7563 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7565 switch (TREE_CODE (expr))
7567 case INTEGER_CST:
7568 return native_encode_int (expr, ptr, len);
7570 case REAL_CST:
7571 return native_encode_real (expr, ptr, len);
7573 case FIXED_CST:
7574 return native_encode_fixed (expr, ptr, len);
7576 case COMPLEX_CST:
7577 return native_encode_complex (expr, ptr, len);
7579 case VECTOR_CST:
7580 return native_encode_vector (expr, ptr, len);
7582 case STRING_CST:
7583 return native_encode_string (expr, ptr, len);
7585 default:
7586 return 0;
7591 /* Subroutine of native_interpret_expr. Interpret the contents of
7592 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7593 If the buffer cannot be interpreted, return NULL_TREE. */
7595 static tree
7596 native_interpret_int (tree type, const unsigned char *ptr, int len)
7598 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7599 double_int result;
7601 if (total_bytes > len
7602 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7603 return NULL_TREE;
7605 result = double_int::from_buffer (ptr, total_bytes);
7607 return double_int_to_tree (type, result);
7611 /* Subroutine of native_interpret_expr. Interpret the contents of
7612 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7613 If the buffer cannot be interpreted, return NULL_TREE. */
7615 static tree
7616 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7618 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7619 double_int result;
7620 FIXED_VALUE_TYPE fixed_value;
7622 if (total_bytes > len
7623 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7624 return NULL_TREE;
7626 result = double_int::from_buffer (ptr, total_bytes);
7627 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7629 return build_fixed (type, fixed_value);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  /* 24 bytes == 192 bits is the most TMP can hold.  */
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  /* Number of words making up each 32-bit group.  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Position of this byte within its 32-bit group; this mirrors the
	 layout produced by native_encode_real.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  /* Undo the target word order, then the byte order within the
	     word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      /* Accumulate the byte into the 32-bit chunks expected by
	 real_from_target.  */
      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
7683 /* Subroutine of native_interpret_expr. Interpret the contents of
7684 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7685 If the buffer cannot be interpreted, return NULL_TREE. */
7687 static tree
7688 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7690 tree etype, rpart, ipart;
7691 int size;
7693 etype = TREE_TYPE (type);
7694 size = GET_MODE_SIZE (TYPE_MODE (etype));
7695 if (size * 2 > len)
7696 return NULL_TREE;
7697 rpart = native_interpret_expr (etype, ptr, size);
7698 if (!rpart)
7699 return NULL_TREE;
7700 ipart = native_interpret_expr (etype, ptr+size, size);
7701 if (!ipart)
7702 return NULL_TREE;
7703 return build_complex (type, rpart, ipart);
7707 /* Subroutine of native_interpret_expr. Interpret the contents of
7708 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7709 If the buffer cannot be interpreted, return NULL_TREE. */
7711 static tree
7712 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7714 tree etype, elem;
7715 int i, size, count;
7716 tree *elements;
7718 etype = TREE_TYPE (type);
7719 size = GET_MODE_SIZE (TYPE_MODE (etype));
7720 count = TYPE_VECTOR_SUBPARTS (type);
7721 if (size * count > len)
7722 return NULL_TREE;
7724 elements = XALLOCAVEC (tree, count);
7725 for (i = count - 1; i >= 0; i--)
7727 elem = native_interpret_expr (etype, ptr+(i*size), size);
7728 if (!elem)
7729 return NULL_TREE;
7730 elements[i] = elem;
7732 return build_vector (type, elements);
7736 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7737 the buffer PTR of length LEN as a constant of type TYPE. For
7738 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7739 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7740 return NULL_TREE. */
7742 tree
7743 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7745 switch (TREE_CODE (type))
7747 case INTEGER_TYPE:
7748 case ENUMERAL_TYPE:
7749 case BOOLEAN_TYPE:
7750 case POINTER_TYPE:
7751 case REFERENCE_TYPE:
7752 return native_interpret_int (type, ptr, len);
7754 case REAL_TYPE:
7755 return native_interpret_real (type, ptr, len);
7757 case FIXED_POINT_TYPE:
7758 return native_interpret_fixed (type, ptr, len);
7760 case COMPLEX_TYPE:
7761 return native_interpret_complex (type, ptr, len);
7763 case VECTOR_TYPE:
7764 return native_interpret_vector (type, ptr, len);
7766 default:
7767 return NULL_TREE;
7771 /* Returns true if we can interpret the contents of a native encoding
7772 as TYPE. */
7774 static bool
7775 can_native_interpret_type_p (tree type)
7777 switch (TREE_CODE (type))
7779 case INTEGER_TYPE:
7780 case ENUMERAL_TYPE:
7781 case BOOLEAN_TYPE:
7782 case POINTER_TYPE:
7783 case REFERENCE_TYPE:
7784 case FIXED_POINT_TYPE:
7785 case REAL_TYPE:
7786 case COMPLEX_TYPE:
7787 case VECTOR_TYPE:
7788 return true;
7789 default:
7790 return false;
7794 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7795 TYPE at compile-time. If we're unable to perform the conversion
7796 return NULL_TREE. */
7798 static tree
7799 fold_view_convert_expr (tree type, tree expr)
7801 /* We support up to 512-bit values (for V8DFmode). */
7802 unsigned char buffer[64];
7803 int len;
7805 /* Check that the host and target are sane. */
7806 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7807 return NULL_TREE;
7809 len = native_encode_expr (expr, buffer, sizeof (buffer));
7810 if (len == 0)
7811 return NULL_TREE;
7813 return native_interpret_expr (type, buffer, len);
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  PTRTYPE is the pointer
   type the result should have.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      /* &*p folds to p, with a cast if the pointer type differs.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    /* &MEM[p, 0] is just p.  */
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    /* &MEM[cst, off] folds to the constant pointer plus the offset.  */
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      /* Take the address of the underlying object, then convert to the
	 requested pointer type.  */
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
7854 /* Build an expression for the address of T. */
7856 tree
7857 build_fold_addr_expr_loc (location_t loc, tree t)
7859 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7861 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7864 static bool vec_cst_ctor_to_array (tree, tree *);
7866 /* Fold a unary expression of code CODE and type TYPE with operand
7867 OP0. Return the folded expression if folding is successful.
7868 Otherwise, return NULL_TREE. */
7870 static tree
7871 fold_unary_loc_1 (location_t loc, enum tree_code code, tree type, tree op0)
7873 tree tem;
7874 tree arg0;
7875 enum tree_code_class kind = TREE_CODE_CLASS (code);
7877 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7878 && TREE_CODE_LENGTH (code) == 1);
7880 arg0 = op0;
7881 if (arg0)
7883 if (CONVERT_EXPR_CODE_P (code)
7884 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7886 /* Don't use STRIP_NOPS, because signedness of argument type
7887 matters. */
7888 STRIP_SIGN_NOPS (arg0);
7890 else
7892 /* Strip any conversions that don't change the mode. This
7893 is safe for every expression, except for a comparison
7894 expression because its signedness is derived from its
7895 operands.
7897 Note that this is done as an internal manipulation within
7898 the constant folder, in order to find the simplest
7899 representation of the arguments so that their form can be
7900 studied. In any cases, the appropriate type conversions
7901 should be put back in the tree that will get out of the
7902 constant folder. */
7903 STRIP_NOPS (arg0);
7907 if (TREE_CODE_CLASS (code) == tcc_unary)
7909 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7910 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7911 fold_build1_loc (loc, code, type,
7912 fold_convert_loc (loc, TREE_TYPE (op0),
7913 TREE_OPERAND (arg0, 1))));
7914 else if (TREE_CODE (arg0) == COND_EXPR)
7916 tree arg01 = TREE_OPERAND (arg0, 1);
7917 tree arg02 = TREE_OPERAND (arg0, 2);
7918 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7919 arg01 = fold_build1_loc (loc, code, type,
7920 fold_convert_loc (loc,
7921 TREE_TYPE (op0), arg01));
7922 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7923 arg02 = fold_build1_loc (loc, code, type,
7924 fold_convert_loc (loc,
7925 TREE_TYPE (op0), arg02));
7926 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7927 arg01, arg02);
7929 /* If this was a conversion, and all we did was to move into
7930 inside the COND_EXPR, bring it back out. But leave it if
7931 it is a conversion from integer to integer and the
7932 result precision is no wider than a word since such a
7933 conversion is cheap and may be optimized away by combine,
7934 while it couldn't if it were outside the COND_EXPR. Then return
7935 so we don't get into an infinite recursion loop taking the
7936 conversion out and then back in. */
7938 if ((CONVERT_EXPR_CODE_P (code)
7939 || code == NON_LVALUE_EXPR)
7940 && TREE_CODE (tem) == COND_EXPR
7941 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7942 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7943 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7944 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7945 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7946 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7947 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7948 && (INTEGRAL_TYPE_P
7949 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7950 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7951 || flag_syntax_only))
7952 tem = build1_loc (loc, code, type,
7953 build3 (COND_EXPR,
7954 TREE_TYPE (TREE_OPERAND
7955 (TREE_OPERAND (tem, 1), 0)),
7956 TREE_OPERAND (tem, 0),
7957 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7958 TREE_OPERAND (TREE_OPERAND (tem, 2),
7959 0)));
7960 return tem;
7964 switch (code)
7966 case PAREN_EXPR:
7967 /* Re-association barriers around constants and other re-association
7968 barriers can be removed. */
7969 if (CONSTANT_CLASS_P (op0)
7970 || TREE_CODE (op0) == PAREN_EXPR)
7971 return fold_convert_loc (loc, type, op0);
7972 return NULL_TREE;
7974 CASE_CONVERT:
7975 case FLOAT_EXPR:
7976 case FIX_TRUNC_EXPR:
7977 if (TREE_TYPE (op0) == type)
7978 return op0;
7980 if (COMPARISON_CLASS_P (op0))
7982 /* If we have (type) (a CMP b) and type is an integral type, return
7983 new expression involving the new type. Canonicalize
7984 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7985 non-integral type.
7986 Do not fold the result as that would not simplify further, also
7987 folding again results in recursions. */
7988 if (TREE_CODE (type) == BOOLEAN_TYPE)
7989 return build2_loc (loc, TREE_CODE (op0), type,
7990 TREE_OPERAND (op0, 0),
7991 TREE_OPERAND (op0, 1));
7992 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7993 && TREE_CODE (type) != VECTOR_TYPE)
7994 return build3_loc (loc, COND_EXPR, type, op0,
7995 constant_boolean_node (true, type),
7996 constant_boolean_node (false, type));
7999 /* Handle cases of two conversions in a row. */
8000 if (CONVERT_EXPR_P (op0))
8002 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8003 tree inter_type = TREE_TYPE (op0);
8004 int inside_int = INTEGRAL_TYPE_P (inside_type);
8005 int inside_ptr = POINTER_TYPE_P (inside_type);
8006 int inside_float = FLOAT_TYPE_P (inside_type);
8007 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8008 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8009 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8010 int inter_int = INTEGRAL_TYPE_P (inter_type);
8011 int inter_ptr = POINTER_TYPE_P (inter_type);
8012 int inter_float = FLOAT_TYPE_P (inter_type);
8013 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8014 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8015 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8016 int final_int = INTEGRAL_TYPE_P (type);
8017 int final_ptr = POINTER_TYPE_P (type);
8018 int final_float = FLOAT_TYPE_P (type);
8019 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8020 unsigned int final_prec = TYPE_PRECISION (type);
8021 int final_unsignedp = TYPE_UNSIGNED (type);
8023 /* In addition to the cases of two conversions in a row
8024 handled below, if we are converting something to its own
8025 type via an object of identical or wider precision, neither
8026 conversion is needed. */
8027 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8028 && (((inter_int || inter_ptr) && final_int)
8029 || (inter_float && final_float))
8030 && inter_prec >= final_prec)
8031 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8033 /* Likewise, if the intermediate and initial types are either both
8034 float or both integer, we don't need the middle conversion if the
8035 former is wider than the latter and doesn't change the signedness
8036 (for integers). Avoid this if the final type is a pointer since
8037 then we sometimes need the middle conversion. Likewise if the
8038 final type has a precision not equal to the size of its mode. */
8039 if (((inter_int && inside_int)
8040 || (inter_float && inside_float)
8041 || (inter_vec && inside_vec))
8042 && inter_prec >= inside_prec
8043 && (inter_float || inter_vec
8044 || inter_unsignedp == inside_unsignedp)
8045 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8046 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8047 && ! final_ptr
8048 && (! final_vec || inter_prec == inside_prec))
8049 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8051 /* If we have a sign-extension of a zero-extended value, we can
8052 replace that by a single zero-extension. Likewise if the
8053 final conversion does not change precision we can drop the
8054 intermediate conversion. */
8055 if (inside_int && inter_int && final_int
8056 && ((inside_prec < inter_prec && inter_prec < final_prec
8057 && inside_unsignedp && !inter_unsignedp)
8058 || final_prec == inter_prec))
8059 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8061 /* Two conversions in a row are not needed unless:
8062 - some conversion is floating-point (overstrict for now), or
8063 - some conversion is a vector (overstrict for now), or
8064 - the intermediate type is narrower than both initial and
8065 final, or
8066 - the intermediate type and innermost type differ in signedness,
8067 and the outermost type is wider than the intermediate, or
8068 - the initial type is a pointer type and the precisions of the
8069 intermediate and final types differ, or
8070 - the final type is a pointer type and the precisions of the
8071 initial and intermediate types differ. */
8072 if (! inside_float && ! inter_float && ! final_float
8073 && ! inside_vec && ! inter_vec && ! final_vec
8074 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8075 && ! (inside_int && inter_int
8076 && inter_unsignedp != inside_unsignedp
8077 && inter_prec < final_prec)
8078 && ((inter_unsignedp && inter_prec > inside_prec)
8079 == (final_unsignedp && final_prec > inter_prec))
8080 && ! (inside_ptr && inter_prec != final_prec)
8081 && ! (final_ptr && inside_prec != inter_prec)
8082 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8083 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8084 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8087 /* Handle (T *)&A.B.C for A being of type T and B and C
8088 living at offset zero. This occurs frequently in
8089 C++ upcasting and then accessing the base. */
8090 if (TREE_CODE (op0) == ADDR_EXPR
8091 && POINTER_TYPE_P (type)
8092 && handled_component_p (TREE_OPERAND (op0, 0)))
8094 HOST_WIDE_INT bitsize, bitpos;
8095 tree offset;
8096 enum machine_mode mode;
8097 int unsignedp, volatilep;
8098 tree base = TREE_OPERAND (op0, 0);
8099 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8100 &mode, &unsignedp, &volatilep, false);
8101 /* If the reference was to a (constant) zero offset, we can use
8102 the address of the base if it has the same base type
8103 as the result type and the pointer type is unqualified. */
8104 if (! offset && bitpos == 0
8105 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8106 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8107 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8108 return fold_convert_loc (loc, type,
8109 build_fold_addr_expr_loc (loc, base));
8112 if (TREE_CODE (op0) == MODIFY_EXPR
8113 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8114 /* Detect assigning a bitfield. */
8115 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8116 && DECL_BIT_FIELD
8117 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8119 /* Don't leave an assignment inside a conversion
8120 unless assigning a bitfield. */
8121 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8122 /* First do the assignment, then return converted constant. */
8123 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8124 TREE_NO_WARNING (tem) = 1;
8125 TREE_USED (tem) = 1;
8126 return tem;
8129 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8130 constants (if x has signed type, the sign bit cannot be set
8131 in c). This folds extension into the BIT_AND_EXPR.
8132 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8133 very likely don't have maximal range for their precision and this
8134 transformation effectively doesn't preserve non-maximal ranges. */
8135 if (TREE_CODE (type) == INTEGER_TYPE
8136 && TREE_CODE (op0) == BIT_AND_EXPR
8137 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8139 tree and_expr = op0;
8140 tree and0 = TREE_OPERAND (and_expr, 0);
8141 tree and1 = TREE_OPERAND (and_expr, 1);
8142 int change = 0;
8144 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8145 || (TYPE_PRECISION (type)
8146 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8147 change = 1;
8148 else if (TYPE_PRECISION (TREE_TYPE (and1))
8149 <= HOST_BITS_PER_WIDE_INT
8150 && tree_fits_uhwi_p (and1))
8152 unsigned HOST_WIDE_INT cst;
8154 cst = tree_to_uhwi (and1);
8155 cst &= HOST_WIDE_INT_M1U
8156 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8157 change = (cst == 0);
8158 #ifdef LOAD_EXTEND_OP
8159 if (change
8160 && !flag_syntax_only
8161 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8162 == ZERO_EXTEND))
8164 tree uns = unsigned_type_for (TREE_TYPE (and0));
8165 and0 = fold_convert_loc (loc, uns, and0);
8166 and1 = fold_convert_loc (loc, uns, and1);
8168 #endif
8170 if (change)
8172 tem = force_fit_type_double (type, tree_to_double_int (and1),
8173 0, TREE_OVERFLOW (and1));
8174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8175 fold_convert_loc (loc, type, and0), tem);
8179 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8180 when one of the new casts will fold away. Conservatively we assume
8181 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8182 if (POINTER_TYPE_P (type)
8183 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8184 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8185 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8186 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8187 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8189 tree arg00 = TREE_OPERAND (arg0, 0);
8190 tree arg01 = TREE_OPERAND (arg0, 1);
8192 return fold_build_pointer_plus_loc
8193 (loc, fold_convert_loc (loc, type, arg00), arg01);
8196 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8197 of the same precision, and X is an integer type not narrower than
8198 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8199 if (INTEGRAL_TYPE_P (type)
8200 && TREE_CODE (op0) == BIT_NOT_EXPR
8201 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8202 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8203 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8205 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8206 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8207 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8208 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8209 fold_convert_loc (loc, type, tem));
8212 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8213 type of X and Y (integer types only). */
8214 if (INTEGRAL_TYPE_P (type)
8215 && TREE_CODE (op0) == MULT_EXPR
8216 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8217 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8219 /* Be careful not to introduce new overflows. */
8220 tree mult_type;
8221 if (TYPE_OVERFLOW_WRAPS (type))
8222 mult_type = type;
8223 else
8224 mult_type = unsigned_type_for (type);
8226 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8228 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8229 fold_convert_loc (loc, mult_type,
8230 TREE_OPERAND (op0, 0)),
8231 fold_convert_loc (loc, mult_type,
8232 TREE_OPERAND (op0, 1)));
8233 return fold_convert_loc (loc, type, tem);
8237 tem = fold_convert_const (code, type, op0);
8238 return tem ? tem : NULL_TREE;
8240 case ADDR_SPACE_CONVERT_EXPR:
8241 if (integer_zerop (arg0))
8242 return fold_convert_const (code, type, arg0);
8243 return NULL_TREE;
8245 case FIXED_CONVERT_EXPR:
8246 tem = fold_convert_const (code, type, arg0);
8247 return tem ? tem : NULL_TREE;
8249 case VIEW_CONVERT_EXPR:
8250 if (TREE_TYPE (op0) == type)
8251 return op0;
8252 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8253 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8254 type, TREE_OPERAND (op0, 0));
8255 if (TREE_CODE (op0) == MEM_REF)
8256 return fold_build2_loc (loc, MEM_REF, type,
8257 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8259 /* For integral conversions with the same precision or pointer
8260 conversions use a NOP_EXPR instead. */
8261 if ((INTEGRAL_TYPE_P (type)
8262 || POINTER_TYPE_P (type))
8263 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8264 || POINTER_TYPE_P (TREE_TYPE (op0)))
8265 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8266 return fold_convert_loc (loc, type, op0);
8268 /* Strip inner integral conversions that do not change the precision. */
8269 if (CONVERT_EXPR_P (op0)
8270 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8271 || POINTER_TYPE_P (TREE_TYPE (op0)))
8272 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8273 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8274 && (TYPE_PRECISION (TREE_TYPE (op0))
8275 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8276 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8277 type, TREE_OPERAND (op0, 0));
8279 return fold_view_convert_expr (type, op0);
8281 case NEGATE_EXPR:
8282 tem = fold_negate_expr (loc, arg0);
8283 if (tem)
8284 return fold_convert_loc (loc, type, tem);
8285 return NULL_TREE;
8287 case ABS_EXPR:
8288 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8289 return fold_abs_const (arg0, type);
8290 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8291 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8292 /* Convert fabs((double)float) into (double)fabsf(float). */
8293 else if (TREE_CODE (arg0) == NOP_EXPR
8294 && TREE_CODE (type) == REAL_TYPE)
8296 tree targ0 = strip_float_extensions (arg0);
8297 if (targ0 != arg0)
8298 return fold_convert_loc (loc, type,
8299 fold_build1_loc (loc, ABS_EXPR,
8300 TREE_TYPE (targ0),
8301 targ0));
8303 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8304 else if (TREE_CODE (arg0) == ABS_EXPR)
8305 return arg0;
8306 else if (tree_expr_nonnegative_p (arg0))
8307 return arg0;
8309 /* Strip sign ops from argument. */
8310 if (TREE_CODE (type) == REAL_TYPE)
8312 tem = fold_strip_sign_ops (arg0);
8313 if (tem)
8314 return fold_build1_loc (loc, ABS_EXPR, type,
8315 fold_convert_loc (loc, type, tem));
8317 return NULL_TREE;
8319 case CONJ_EXPR:
8320 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8321 return fold_convert_loc (loc, type, arg0);
8322 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8324 tree itype = TREE_TYPE (type);
8325 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8326 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8328 negate_expr (ipart));
8330 if (TREE_CODE (arg0) == COMPLEX_CST)
8332 tree itype = TREE_TYPE (type);
8333 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8334 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8335 return build_complex (type, rpart, negate_expr (ipart));
8337 if (TREE_CODE (arg0) == CONJ_EXPR)
8338 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8339 return NULL_TREE;
8341 case BIT_NOT_EXPR:
8342 if (TREE_CODE (arg0) == INTEGER_CST)
8343 return fold_not_const (arg0, type);
8344 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8345 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8346 /* Convert ~ (-A) to A - 1. */
8347 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8348 return fold_build2_loc (loc, MINUS_EXPR, type,
8349 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8350 build_int_cst (type, 1));
8351 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8352 else if (INTEGRAL_TYPE_P (type)
8353 && ((TREE_CODE (arg0) == MINUS_EXPR
8354 && integer_onep (TREE_OPERAND (arg0, 1)))
8355 || (TREE_CODE (arg0) == PLUS_EXPR
8356 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8357 return fold_build1_loc (loc, NEGATE_EXPR, type,
8358 fold_convert_loc (loc, type,
8359 TREE_OPERAND (arg0, 0)));
8360 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8361 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8362 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8363 fold_convert_loc (loc, type,
8364 TREE_OPERAND (arg0, 0)))))
8365 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8366 fold_convert_loc (loc, type,
8367 TREE_OPERAND (arg0, 1)));
8368 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8369 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8370 fold_convert_loc (loc, type,
8371 TREE_OPERAND (arg0, 1)))))
8372 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8373 fold_convert_loc (loc, type,
8374 TREE_OPERAND (arg0, 0)), tem);
8375 /* Perform BIT_NOT_EXPR on each element individually. */
8376 else if (TREE_CODE (arg0) == VECTOR_CST)
8378 tree *elements;
8379 tree elem;
8380 unsigned count = VECTOR_CST_NELTS (arg0), i;
8382 elements = XALLOCAVEC (tree, count);
8383 for (i = 0; i < count; i++)
8385 elem = VECTOR_CST_ELT (arg0, i);
8386 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8387 if (elem == NULL_TREE)
8388 break;
8389 elements[i] = elem;
8391 if (i == count)
8392 return build_vector (type, elements);
8394 else if (COMPARISON_CLASS_P (arg0)
8395 && (VECTOR_TYPE_P (type)
8396 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8398 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8399 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8400 HONOR_NANS (TYPE_MODE (op_type)));
8401 if (subcode != ERROR_MARK)
8402 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8403 TREE_OPERAND (arg0, 1));
8407 return NULL_TREE;
8409 case TRUTH_NOT_EXPR:
8410 /* Note that the operand of this must be an int
8411 and its values must be 0 or 1.
8412 ("true" is a fixed value perhaps depending on the language,
8413 but we don't handle values other than 1 correctly yet.) */
8414 tem = fold_truth_not_expr (loc, arg0);
8415 if (!tem)
8416 return NULL_TREE;
8417 return fold_convert_loc (loc, type, tem);
8419 case REALPART_EXPR:
8420 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8421 return fold_convert_loc (loc, type, arg0);
8422 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8423 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8424 TREE_OPERAND (arg0, 1));
8425 if (TREE_CODE (arg0) == COMPLEX_CST)
8426 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8427 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8429 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8430 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8431 fold_build1_loc (loc, REALPART_EXPR, itype,
8432 TREE_OPERAND (arg0, 0)),
8433 fold_build1_loc (loc, REALPART_EXPR, itype,
8434 TREE_OPERAND (arg0, 1)));
8435 return fold_convert_loc (loc, type, tem);
8437 if (TREE_CODE (arg0) == CONJ_EXPR)
8439 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8440 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8441 TREE_OPERAND (arg0, 0));
8442 return fold_convert_loc (loc, type, tem);
8444 if (TREE_CODE (arg0) == CALL_EXPR)
8446 tree fn = get_callee_fndecl (arg0);
8447 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8448 switch (DECL_FUNCTION_CODE (fn))
8450 CASE_FLT_FN (BUILT_IN_CEXPI):
8451 fn = mathfn_built_in (type, BUILT_IN_COS);
8452 if (fn)
8453 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8454 break;
8456 default:
8457 break;
8460 return NULL_TREE;
8462 case IMAGPART_EXPR:
8463 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8464 return build_zero_cst (type);
8465 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8466 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8467 TREE_OPERAND (arg0, 0));
8468 if (TREE_CODE (arg0) == COMPLEX_CST)
8469 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8470 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8472 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8473 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8474 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8475 TREE_OPERAND (arg0, 0)),
8476 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8477 TREE_OPERAND (arg0, 1)));
8478 return fold_convert_loc (loc, type, tem);
8480 if (TREE_CODE (arg0) == CONJ_EXPR)
8482 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8483 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8484 return fold_convert_loc (loc, type, negate_expr (tem));
8486 if (TREE_CODE (arg0) == CALL_EXPR)
8488 tree fn = get_callee_fndecl (arg0);
8489 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8490 switch (DECL_FUNCTION_CODE (fn))
8492 CASE_FLT_FN (BUILT_IN_CEXPI):
8493 fn = mathfn_built_in (type, BUILT_IN_SIN);
8494 if (fn)
8495 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8496 break;
8498 default:
8499 break;
8502 return NULL_TREE;
8504 case INDIRECT_REF:
8505 /* Fold *&X to X if X is an lvalue. */
8506 if (TREE_CODE (op0) == ADDR_EXPR)
8508 tree op00 = TREE_OPERAND (op0, 0);
8509 if ((TREE_CODE (op00) == VAR_DECL
8510 || TREE_CODE (op00) == PARM_DECL
8511 || TREE_CODE (op00) == RESULT_DECL)
8512 && !TREE_READONLY (op00))
8513 return op00;
8515 return NULL_TREE;
8517 case VEC_UNPACK_LO_EXPR:
8518 case VEC_UNPACK_HI_EXPR:
8519 case VEC_UNPACK_FLOAT_LO_EXPR:
8520 case VEC_UNPACK_FLOAT_HI_EXPR:
8522 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8523 tree *elts;
8524 enum tree_code subcode;
8526 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8527 if (TREE_CODE (arg0) != VECTOR_CST)
8528 return NULL_TREE;
8530 elts = XALLOCAVEC (tree, nelts * 2);
8531 if (!vec_cst_ctor_to_array (arg0, elts))
8532 return NULL_TREE;
8534 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8535 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8536 elts += nelts;
8538 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8539 subcode = NOP_EXPR;
8540 else
8541 subcode = FLOAT_EXPR;
8543 for (i = 0; i < nelts; i++)
8545 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8546 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8547 return NULL_TREE;
8550 return build_vector (type, elts);
8553 case REDUC_MIN_EXPR:
8554 case REDUC_MAX_EXPR:
8555 case REDUC_PLUS_EXPR:
8557 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8558 tree *elts;
8559 enum tree_code subcode;
8561 if (TREE_CODE (op0) != VECTOR_CST)
8562 return NULL_TREE;
8564 elts = XALLOCAVEC (tree, nelts);
8565 if (!vec_cst_ctor_to_array (op0, elts))
8566 return NULL_TREE;
8568 switch (code)
8570 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8571 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8572 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8573 default: gcc_unreachable ();
8576 for (i = 1; i < nelts; i++)
8578 elts[0] = const_binop (subcode, elts[0], elts[i]);
8579 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8580 return NULL_TREE;
8581 elts[i] = build_zero_cst (TREE_TYPE (type));
8584 return build_vector (type, elts);
8587 default:
8588 return NULL_TREE;
8589 } /* switch (code) */
8592 /* Given an expression tree EXP, set the EXPR_FOLDED flag, and if it is
8593 a nop, recursively set the EXPR_FOLDED flag of its operand. */
8595 static void
8596 set_expr_folded_flag (tree exp)
8598 /* FIXME -- can not set the flag on SSA_NAME, the flag overlaps
8599 with the version member. */
8600 if (TREE_CODE (exp) == SSA_NAME)
8601 return;
8603 EXPR_FOLDED (exp) = 1;
8605 /* If EXP is a nop (i.e. NON_LVALUE_EXPRs and NOP_EXPRs), we need to
8606 recursively set the EXPR_FOLDED flag of its operand because the
8607 expression will be stripped later. */
8608 while ((CONVERT_EXPR_P (exp)
8609 || TREE_CODE (exp) == NON_LVALUE_EXPR)
8610 && TREE_OPERAND (exp, 0) != error_mark_node)
8612 exp = TREE_OPERAND (exp, 0);
8613 /* FIXME -- can not set the flag on SSA_NAME, the flag overlaps
8614 with the version member. */
8615 if (TREE_CODE (exp) != SSA_NAME)
8616 EXPR_FOLDED (exp) = 1;
8620 /* Fold a unary expression of code CODE and type TYPE with operand
8621 OP0. Return the folded expression if folding is successful.
8622 Otherwise, return NULL_TREE.
8623 This is a wrapper around fold_unary_1 function (which does the
8624 actual folding). Set the EXPR_FOLDED flag of the folded expression
8625 if folding is successful. */
8627 tree
8628 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8630 tree tem = fold_unary_loc_1 (loc, code, type, op0);
8631 if (tem)
8632 set_expr_folded_flag (tem);
8633 return tem;
8636 /* If the operation was a conversion do _not_ mark a resulting constant
8637 with TREE_OVERFLOW if the original constant was not. These conversions
8638 have implementation defined behavior and retaining the TREE_OVERFLOW
8639 flag here would confuse later passes such as VRP. */
8640 tree
8641 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8642 tree type, tree op0)
8644 tree res = fold_unary_loc (loc, code, type, op0);
8645 if (res
8646 && TREE_CODE (res) == INTEGER_CST
8647 && TREE_CODE (op0) == INTEGER_CST
8648 && CONVERT_EXPR_CODE_P (code))
8649 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8651 return res;
8654 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8655 operands OP0 and OP1. LOC is the location of the resulting expression.
8656 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8657 Return the folded expression if folding is successful. Otherwise,
8658 return NULL_TREE. */
8659 static tree
8660 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8661 tree arg0, tree arg1, tree op0, tree op1)
8663 tree tem;
8665 /* We only do these simplifications if we are optimizing. */
8666 if (!optimize)
8667 return NULL_TREE;
8669 /* Check for things like (A || B) && (A || C). We can convert this
8670 to A || (B && C). Note that either operator can be any of the four
8671 truth and/or operations and the transformation will still be
8672 valid. Also note that we only care about order for the
8673 ANDIF and ORIF operators. If B contains side effects, this
8674 might change the truth-value of A. */
8675 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8676 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8677 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8678 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8679 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8680 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8682 tree a00 = TREE_OPERAND (arg0, 0)
8683 tree a01 = TREE_OPERAND (arg0, 1);
8684 tree a10 = TREE_OPERAND (arg1, 0);
8685 tree a11 = TREE_OPERAND (arg1, 1);
8686 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8687 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8688 && (code == TRUTH_AND_EXPR
8689 || code == TRUTH_OR_EXPR));
8691 if (operand_equal_p (a00, a10, 0))
8692 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8693 fold_build2_loc (loc, code, type, a01, a11));
8694 else if (commutative && operand_equal_p (a00, a11, 0))
8695 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8696 fold_build2_loc (loc, code, type, a01, a10));
8697 else if (commutative && operand_equal_p (a01, a10, 0))
8698 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8699 fold_build2_loc (loc, code, type, a00, a11));
8701 /* This case is tricky because we must either have commutative
8702 operators or else A10 must not have side-effects. */
8704 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8705 && operand_equal_p (a01, a11, 0))
8706 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8707 fold_build2_loc (loc, code, type, a00, a10),
8708 a01);
8711 /* See if we can build a range comparison. */
8712 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8713 return tem;
8715 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8716 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8718 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8719 if (tem)
8720 return fold_build2_loc (loc, code, type, tem, arg1);
8723 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8724 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8726 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8727 if (tem)
8728 return fold_build2_loc (loc, code, type, arg0, tem);
8731 /* Check for the possibility of merging component references. If our
8732 lhs is another similar operation, try to merge its rhs with our
8733 rhs. Then try to merge our lhs and rhs. */
8734 if (TREE_CODE (arg0) == code
8735 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8736 TREE_OPERAND (arg0, 1), arg1)))
8737 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8739 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8740 return tem;
8742 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8743 && (code == TRUTH_AND_EXPR
8744 || code == TRUTH_ANDIF_EXPR
8745 || code == TRUTH_OR_EXPR
8746 || code == TRUTH_ORIF_EXPR))
8748 enum tree_code ncode, icode;
8750 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8751 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8752 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8754 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8755 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8756 We don't want to pack more than two leafs to a non-IF AND/OR
8757 expression.
8758 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8759 equal to IF-CODE, then we don't want to add right-hand operand.
8760 If the inner right-hand side of left-hand operand has
8761 side-effects, or isn't simple, then we can't add to it,
8762 as otherwise we might destroy if-sequence. */
8763 if (TREE_CODE (arg0) == icode
8764 && simple_operand_p_2 (arg1)
8765 /* Needed for sequence points to handle trappings, and
8766 side-effects. */
8767 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8769 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8770 arg1);
8771 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8772 tem);
8774 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8775 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8776 else if (TREE_CODE (arg1) == icode
8777 && simple_operand_p_2 (arg0)
8778 /* Needed for sequence points to handle trappings, and
8779 side-effects. */
8780 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8782 tem = fold_build2_loc (loc, ncode, type,
8783 arg0, TREE_OPERAND (arg1, 0));
8784 return fold_build2_loc (loc, icode, type, tem,
8785 TREE_OPERAND (arg1, 1));
8787 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8788 into (A OR B).
8789 For sequence point consistency, we need to check for trapping,
8790 and side-effects. */
8791 else if (code == icode && simple_operand_p_2 (arg0)
8792 && simple_operand_p_2 (arg1))
8793 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8796 return NULL_TREE;
8799 /* Fold a binary expression of code CODE and type TYPE with operands
8800 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8801 Return the folded expression if folding is successful. Otherwise,
8802 return NULL_TREE. */
8804 static tree
8805 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8807 enum tree_code compl_code;
8809 if (code == MIN_EXPR)
8810 compl_code = MAX_EXPR;
8811 else if (code == MAX_EXPR)
8812 compl_code = MIN_EXPR;
8813 else
8814 gcc_unreachable ();
8816 /* MIN (MAX (a, b), b) == b. */
8817 if (TREE_CODE (op0) == compl_code
8818 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8819 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8821 /* MIN (MAX (b, a), b) == b. */
8822 if (TREE_CODE (op0) == compl_code
8823 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8824 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8825 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8827 /* MIN (a, MAX (a, b)) == a. */
8828 if (TREE_CODE (op1) == compl_code
8829 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8830 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8831 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8833 /* MIN (a, MAX (b, a)) == a. */
8834 if (TREE_CODE (op1) == compl_code
8835 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8836 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8837 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8839 return NULL_TREE;
8842 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8843 by changing CODE to reduce the magnitude of constants involved in
8844 ARG0 of the comparison.
8845 Returns a canonicalized comparison tree if a simplification was
8846 possible, otherwise returns NULL_TREE.
8847 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8848 valid if signed overflow is undefined. */
8850 static tree
8851 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8852 tree arg0, tree arg1,
8853 bool *strict_overflow_p)
8855 enum tree_code code0 = TREE_CODE (arg0);
8856 tree t, cst0 = NULL_TREE;
8857 int sgn0;
8858 bool swap = false;
8860 /* Match A +- CST code arg1 and CST code arg1. We can change the
8861 first form only if overflow is undefined. */
8862 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8863 /* In principle pointers also have undefined overflow behavior,
8864 but that causes problems elsewhere. */
8865 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8866 && (code0 == MINUS_EXPR
8867 || code0 == PLUS_EXPR)
8868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8869 || code0 == INTEGER_CST))
8870 return NULL_TREE;
8872 /* Identify the constant in arg0 and its sign. */
8873 if (code0 == INTEGER_CST)
8874 cst0 = arg0;
8875 else
8876 cst0 = TREE_OPERAND (arg0, 1);
8877 sgn0 = tree_int_cst_sgn (cst0);
8879 /* Overflowed constants and zero will cause problems. */
8880 if (integer_zerop (cst0)
8881 || TREE_OVERFLOW (cst0))
8882 return NULL_TREE;
8884 /* See if we can reduce the magnitude of the constant in
8885 arg0 by changing the comparison code. */
8886 if (code0 == INTEGER_CST)
8888 /* CST <= arg1 -> CST-1 < arg1. */
8889 if (code == LE_EXPR && sgn0 == 1)
8890 code = LT_EXPR;
8891 /* -CST < arg1 -> -CST-1 <= arg1. */
8892 else if (code == LT_EXPR && sgn0 == -1)
8893 code = LE_EXPR;
8894 /* CST > arg1 -> CST-1 >= arg1. */
8895 else if (code == GT_EXPR && sgn0 == 1)
8896 code = GE_EXPR;
8897 /* -CST >= arg1 -> -CST-1 > arg1. */
8898 else if (code == GE_EXPR && sgn0 == -1)
8899 code = GT_EXPR;
8900 else
8901 return NULL_TREE;
8902 /* arg1 code' CST' might be more canonical. */
8903 swap = true;
8905 else
8907 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8908 if (code == LT_EXPR
8909 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8910 code = LE_EXPR;
8911 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8912 else if (code == GT_EXPR
8913 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8914 code = GE_EXPR;
8915 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8916 else if (code == LE_EXPR
8917 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8918 code = LT_EXPR;
8919 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8920 else if (code == GE_EXPR
8921 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8922 code = GT_EXPR;
8923 else
8924 return NULL_TREE;
8925 *strict_overflow_p = true;
8928 /* Now build the constant reduced in magnitude. But not if that
8929 would produce one outside of its type's range. */
8930 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8931 && ((sgn0 == 1
8932 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8933 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8934 || (sgn0 == -1
8935 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8936 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8937 /* We cannot swap the comparison here as that would cause us to
8938 endlessly recurse. */
8939 return NULL_TREE;
8941 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8942 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8943 if (code0 != INTEGER_CST)
8944 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8945 t = fold_convert (TREE_TYPE (arg1), t);
8947 /* If swapping might yield a more canonical form, do so. */
8948 if (swap)
8949 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8950 else
8951 return fold_build2_loc (loc, code, type, t, arg1);
8954 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8955 overflow further. Try to decrease the magnitude of constants involved
8956 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8957 and put sole constants at the second argument position.
8958 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8960 static tree
8961 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8962 tree arg0, tree arg1)
8964 tree t;
8965 bool strict_overflow_p;
8966 const char * const warnmsg = G_("assuming signed overflow does not occur "
8967 "when reducing constant in comparison");
8969 /* Try canonicalization by simplifying arg0. */
8970 strict_overflow_p = false;
8971 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8972 &strict_overflow_p);
8973 if (t)
8975 if (strict_overflow_p)
8976 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8977 return t;
8980 /* Try canonicalization by simplifying arg1 using the swapped
8981 comparison. */
8982 code = swap_tree_comparison (code);
8983 strict_overflow_p = false;
8984 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8985 &strict_overflow_p);
8986 if (t && strict_overflow_p)
8987 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8988 return t;
8991 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8992 space. This is used to avoid issuing overflow warnings for
8993 expressions like &p->x which can not wrap. */
8995 static bool
8996 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8998 double_int di_offset, total;
9000 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9001 return true;
9003 if (bitpos < 0)
9004 return true;
9006 if (offset == NULL_TREE)
9007 di_offset = double_int_zero;
9008 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9009 return true;
9010 else
9011 di_offset = TREE_INT_CST (offset);
9013 bool overflow;
9014 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
9015 total = di_offset.add_with_sign (units, true, &overflow);
9016 if (overflow)
9017 return true;
9019 if (total.high != 0)
9020 return true;
9022 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9023 if (size <= 0)
9024 return true;
9026 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9027 array. */
9028 if (TREE_CODE (base) == ADDR_EXPR)
9030 HOST_WIDE_INT base_size;
9032 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9033 if (base_size > 0 && size < base_size)
9034 size = base_size;
9037 return total.low > (unsigned HOST_WIDE_INT) size;
9040 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
9041 kind INTEGER_CST. This makes sure to properly sign-extend the
9042 constant. */
9044 static HOST_WIDE_INT
9045 size_low_cst (const_tree t)
9047 double_int d = tree_to_double_int (t);
9048 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  /* Strip sign-preserving conversions so the patterns below see the
     underlying operation.  */
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* First try constant folding of the whole comparison.  */
  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.
     Valid only when signed overflow is undefined in ARG1's type.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      /* No overflow (or same kind of node): move the constant across.  */
      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C1 +- C2",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      /* Fold the byte offset into BITPOS0 only when the
		 conversion to bits does not overflow.  */
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      /* Same decomposition for the second operand.  */
      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      /* Decide the comparison purely from the constant bit
		 positions.  */
	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      /* Otherwise try moving the combined constant to the left side.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* Floating-point specific simplifications.  */
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
9774 /* Subroutine of fold_binary. Optimize complex multiplications of the
9775 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9776 argument EXPR represents the expression "z" of type TYPE. */
9778 static tree
9779 fold_mult_zconjz (location_t loc, tree type, tree expr)
9781 tree itype = TREE_TYPE (type);
9782 tree rpart, ipart, tem;
9784 if (TREE_CODE (expr) == COMPLEX_EXPR)
9786 rpart = TREE_OPERAND (expr, 0);
9787 ipart = TREE_OPERAND (expr, 1);
9789 else if (TREE_CODE (expr) == COMPLEX_CST)
9791 rpart = TREE_REALPART (expr);
9792 ipart = TREE_IMAGPART (expr);
9794 else
9796 expr = save_expr (expr);
9797 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9798 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9801 rpart = save_expr (rpart);
9802 ipart = save_expr (ipart);
9803 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9804 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9805 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9806 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9807 build_zero_cst (itype));
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.
   NOTE(review): ALLOW_FUNC_ALIGN is only threaded through the recursive
   call here; its effect presumably lives in get_object_alignment_1's
   callers elsewhere — confirm against the full file.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      /* Known object address: modulus is the object's alignment and the
	 residue its misalignment, both converted from bits to bytes.  */
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the base pointer; the offset then adjusts the residue
	 (constant offset) or tightens the modulus (scaled offset).  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* P + C: same modulus, residue shifted by C.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
9888 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9889 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9891 static bool
9892 vec_cst_ctor_to_array (tree arg, tree *elts)
9894 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9896 if (TREE_CODE (arg) == VECTOR_CST)
9898 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9899 elts[i] = VECTOR_CST_ELT (arg, i);
9901 else if (TREE_CODE (arg) == CONSTRUCTOR)
9903 constructor_elt *elt;
9905 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9906 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9907 return false;
9908 else
9909 elts[i] = elt->value;
9911 else
9912 return false;
9913 for (; i < nelts; i++)
9914 elts[i]
9915 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9916 return true;
9919 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9920 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9921 NULL_TREE otherwise. */
9923 static tree
9924 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9926 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9927 tree *elts;
9928 bool need_ctor = false;
9930 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9931 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9932 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9933 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9934 return NULL_TREE;
9936 elts = XALLOCAVEC (tree, nelts * 3);
9937 if (!vec_cst_ctor_to_array (arg0, elts)
9938 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9939 return NULL_TREE;
9941 for (i = 0; i < nelts; i++)
9943 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9944 need_ctor = true;
9945 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9948 if (need_ctor)
9950 vec<constructor_elt, va_gc> *v;
9951 vec_alloc (v, nelts);
9952 for (i = 0; i < nelts; i++)
9953 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9954 return build_constructor (type, v);
9956 else
9957 return build_vector (type, &elts[2 * nelts]);
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  Note the first two arms
     assign BASE_OFFSET as a side effect of the condition; evaluation
     order of the || chain is therefore significant.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      /* &a[i] - &a[j] folds to base_delta + (i - j) * sizeof (a[0]).  */
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
9998 /* If the real or vector real constant CST of type TYPE has an exact
9999 inverse, return it, else return NULL. */
10001 static tree
10002 exact_inverse (tree type, tree cst)
10004 REAL_VALUE_TYPE r;
10005 tree unit_type, *elts;
10006 enum machine_mode mode;
10007 unsigned vec_nelts, i;
10009 switch (TREE_CODE (cst))
10011 case REAL_CST:
10012 r = TREE_REAL_CST (cst);
10014 if (exact_real_inverse (TYPE_MODE (type), &r))
10015 return build_real (type, r);
10017 return NULL_TREE;
10019 case VECTOR_CST:
10020 vec_nelts = VECTOR_CST_NELTS (cst);
10021 elts = XALLOCAVEC (tree, vec_nelts);
10022 unit_type = TREE_TYPE (type);
10023 mode = TYPE_MODE (unit_type);
10025 for (i = 0; i < vec_nelts; i++)
10027 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10028 if (!exact_real_inverse (mode, &r))
10029 return NULL_TREE;
10030 elts[i] = build_real (unit_type, r);
10033 return build_vector (type, elts);
10035 default:
10036 return NULL_TREE;
10040 /* Mask out the tz least significant bits of X of type TYPE where
10041 tz is the number of trailing zeroes in Y. */
10042 static double_int
10043 mask_with_tz (tree type, double_int x, double_int y)
10045 int tz = y.trailing_zeros ();
10047 if (tz > 0)
10049 double_int mask;
10051 mask = ~double_int::mask (tz);
10052 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
10053 return mask & x;
10055 return x;
10058 /* Return true when T is an address and is known to be nonzero.
10059 For floating point we further ensure that T is not denormal.
10060 Similar logic is present in nonzero_address in rtlanal.h.
10062 If the return value is based on the assumption that signed overflow
10063 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10064 change *STRICT_OVERFLOW_P. */
10066 static bool
10067 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10069 tree type = TREE_TYPE (t);
10070 enum tree_code code;
10072 /* Doing something useful for floating point would need more work. */
10073 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10074 return false;
10076 code = TREE_CODE (t);
10077 switch (TREE_CODE_CLASS (code))
10079 case tcc_unary:
10080 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10081 strict_overflow_p);
10082 case tcc_binary:
10083 case tcc_comparison:
10084 return tree_binary_nonzero_warnv_p (code, type,
10085 TREE_OPERAND (t, 0),
10086 TREE_OPERAND (t, 1),
10087 strict_overflow_p);
10088 case tcc_constant:
10089 case tcc_declaration:
10090 case tcc_reference:
10091 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10093 default:
10094 break;
10097 switch (code)
10099 case TRUTH_NOT_EXPR:
10100 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10101 strict_overflow_p);
10103 case TRUTH_AND_EXPR:
10104 case TRUTH_OR_EXPR:
10105 case TRUTH_XOR_EXPR:
10106 return tree_binary_nonzero_warnv_p (code, type,
10107 TREE_OPERAND (t, 0),
10108 TREE_OPERAND (t, 1),
10109 strict_overflow_p);
10111 case COND_EXPR:
10112 case CONSTRUCTOR:
10113 case OBJ_TYPE_REF:
10114 case ASSERT_EXPR:
10115 case ADDR_EXPR:
10116 case WITH_SIZE_EXPR:
10117 case SSA_NAME:
10118 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10120 case COMPOUND_EXPR:
10121 case MODIFY_EXPR:
10122 case BIND_EXPR:
10123 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10124 strict_overflow_p);
10126 case SAVE_EXPR:
10127 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10128 strict_overflow_p);
10130 case CALL_EXPR:
10132 tree fndecl = get_callee_fndecl (t);
10133 if (!fndecl) return false;
10134 if (flag_delete_null_pointer_checks && !flag_check_new
10135 && DECL_IS_OPERATOR_NEW (fndecl)
10136 && !TREE_NOTHROW (fndecl))
10137 return true;
10138 if (flag_delete_null_pointer_checks
10139 && lookup_attribute ("returns_nonnull",
10140 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10141 return true;
10142 return alloca_call_p (t);
10145 default:
10146 break;
10148 return false;
10151 /* Return true when T is an address and is known to be nonzero.
10152 Handle warnings about undefined signed overflow. */
10154 static bool
10155 tree_expr_nonzero_p (tree t)
10157 bool ret, strict_overflow_p;
10159 strict_overflow_p = false;
10160 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10161 if (strict_overflow_p)
10162 fold_overflow_warning (("assuming signed overflow does not occur when "
10163 "determining that expression is always "
10164 "non-zero"),
10165 WARN_STRICT_OVERFLOW_MISC);
10166 return ret;
10169 /* Fold a binary expression of code CODE and type TYPE with operands
10170 OP0 and OP1. LOC is the location of the resulting expression.
10171 Return the folded expression if folding is successful. Otherwise,
10172 return NULL_TREE. */
10174 static tree
10175 fold_binary_loc_1 (location_t loc,
10176 enum tree_code code, tree type, tree op0, tree op1)
10178 enum tree_code_class kind = TREE_CODE_CLASS (code);
10179 tree arg0, arg1, tem;
10180 tree t1 = NULL_TREE;
10181 bool strict_overflow_p;
10182 unsigned int prec;
10184 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10185 && TREE_CODE_LENGTH (code) == 2
10186 && op0 != NULL_TREE
10187 && op1 != NULL_TREE);
10189 arg0 = op0;
10190 arg1 = op1;
10192 /* Strip any conversions that don't change the mode. This is
10193 safe for every expression, except for a comparison expression
10194 because its signedness is derived from its operands. So, in
10195 the latter case, only strip conversions that don't change the
10196 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10197 preserved.
10199 Note that this is done as an internal manipulation within the
10200 constant folder, in order to find the simplest representation
10201 of the arguments so that their form can be studied. In any
10202 cases, the appropriate type conversions should be put back in
10203 the tree that will get out of the constant folder. */
10205 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10207 STRIP_SIGN_NOPS (arg0);
10208 STRIP_SIGN_NOPS (arg1);
10210 else
10212 STRIP_NOPS (arg0);
10213 STRIP_NOPS (arg1);
10216 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10217 constant but we can't do arithmetic on them. */
10218 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10219 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10220 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10221 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10222 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10223 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10224 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10226 if (kind == tcc_binary)
10228 /* Make sure type and arg0 have the same saturating flag. */
10229 gcc_assert (TYPE_SATURATING (type)
10230 == TYPE_SATURATING (TREE_TYPE (arg0)));
10231 tem = const_binop (code, arg0, arg1);
10233 else if (kind == tcc_comparison)
10234 tem = fold_relational_const (code, type, arg0, arg1);
10235 else
10236 tem = NULL_TREE;
10238 if (tem != NULL_TREE)
10240 if (TREE_TYPE (tem) != type)
10241 tem = fold_convert_loc (loc, type, tem);
10242 return tem;
10246 /* If this is a commutative operation, and ARG0 is a constant, move it
10247 to ARG1 to reduce the number of tests below. */
10248 if (commutative_tree_code (code)
10249 && tree_swap_operands_p (arg0, arg1, true))
10250 return fold_build2_loc (loc, code, type, op1, op0);
10252 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10254 First check for cases where an arithmetic operation is applied to a
10255 compound, conditional, or comparison operation. Push the arithmetic
10256 operation inside the compound or conditional to see if any folding
10257 can then be done. Convert comparison to conditional for this purpose.
10258 The also optimizes non-constant cases that used to be done in
10259 expand_expr.
10261 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10262 one of the operands is a comparison and the other is a comparison, a
10263 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10264 code below would make the expression more complex. Change it to a
10265 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10266 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10268 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10269 || code == EQ_EXPR || code == NE_EXPR)
10270 && TREE_CODE (type) != VECTOR_TYPE
10271 && ((truth_value_p (TREE_CODE (arg0))
10272 && (truth_value_p (TREE_CODE (arg1))
10273 || (TREE_CODE (arg1) == BIT_AND_EXPR
10274 && integer_onep (TREE_OPERAND (arg1, 1)))))
10275 || (truth_value_p (TREE_CODE (arg1))
10276 && (truth_value_p (TREE_CODE (arg0))
10277 || (TREE_CODE (arg0) == BIT_AND_EXPR
10278 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10280 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10281 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10282 : TRUTH_XOR_EXPR,
10283 boolean_type_node,
10284 fold_convert_loc (loc, boolean_type_node, arg0),
10285 fold_convert_loc (loc, boolean_type_node, arg1));
10287 if (code == EQ_EXPR)
10288 tem = invert_truthvalue_loc (loc, tem);
10290 return fold_convert_loc (loc, type, tem);
10293 if (TREE_CODE_CLASS (code) == tcc_binary
10294 || TREE_CODE_CLASS (code) == tcc_comparison)
10296 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10298 tem = fold_build2_loc (loc, code, type,
10299 fold_convert_loc (loc, TREE_TYPE (op0),
10300 TREE_OPERAND (arg0, 1)), op1);
10301 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10302 tem);
10304 if (TREE_CODE (arg1) == COMPOUND_EXPR
10305 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10307 tem = fold_build2_loc (loc, code, type, op0,
10308 fold_convert_loc (loc, TREE_TYPE (op1),
10309 TREE_OPERAND (arg1, 1)));
10310 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10311 tem);
10314 if (TREE_CODE (arg0) == COND_EXPR
10315 || TREE_CODE (arg0) == VEC_COND_EXPR
10316 || COMPARISON_CLASS_P (arg0))
10318 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10319 arg0, arg1,
10320 /*cond_first_p=*/1);
10321 if (tem != NULL_TREE)
10322 return tem;
10325 if (TREE_CODE (arg1) == COND_EXPR
10326 || TREE_CODE (arg1) == VEC_COND_EXPR
10327 || COMPARISON_CLASS_P (arg1))
10329 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10330 arg1, arg0,
10331 /*cond_first_p=*/0);
10332 if (tem != NULL_TREE)
10333 return tem;
10337 switch (code)
10339 case MEM_REF:
10340 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10341 if (TREE_CODE (arg0) == ADDR_EXPR
10342 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10344 tree iref = TREE_OPERAND (arg0, 0);
10345 return fold_build2 (MEM_REF, type,
10346 TREE_OPERAND (iref, 0),
10347 int_const_binop (PLUS_EXPR, arg1,
10348 TREE_OPERAND (iref, 1)));
10351 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10352 if (TREE_CODE (arg0) == ADDR_EXPR
10353 && handled_component_p (TREE_OPERAND (arg0, 0)))
10355 tree base;
10356 HOST_WIDE_INT coffset;
10357 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10358 &coffset);
10359 if (!base)
10360 return NULL_TREE;
10361 return fold_build2 (MEM_REF, type,
10362 build_fold_addr_expr (base),
10363 int_const_binop (PLUS_EXPR, arg1,
10364 size_int (coffset)));
10367 return NULL_TREE;
10369 case POINTER_PLUS_EXPR:
10370 /* 0 +p index -> (type)index */
10371 if (integer_zerop (arg0))
10372 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10374 /* PTR +p 0 -> PTR */
10375 if (integer_zerop (arg1))
10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10378 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10379 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10380 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10381 return fold_convert_loc (loc, type,
10382 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10383 fold_convert_loc (loc, sizetype,
10384 arg1),
10385 fold_convert_loc (loc, sizetype,
10386 arg0)));
10388 /* (PTR +p B) +p A -> PTR +p (B + A) */
10389 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10391 tree inner;
10392 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10393 tree arg00 = TREE_OPERAND (arg0, 0);
10394 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10395 arg01, fold_convert_loc (loc, sizetype, arg1));
10396 return fold_convert_loc (loc, type,
10397 fold_build_pointer_plus_loc (loc,
10398 arg00, inner));
10401 /* PTR_CST +p CST -> CST1 */
10402 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10403 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10404 fold_convert_loc (loc, type, arg1));
10406 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10407 of the array. Loop optimizer sometimes produce this type of
10408 expressions. */
10409 if (TREE_CODE (arg0) == ADDR_EXPR)
10411 tem = try_move_mult_to_index (loc, arg0,
10412 fold_convert_loc (loc,
10413 ssizetype, arg1));
10414 if (tem)
10415 return fold_convert_loc (loc, type, tem);
10418 return NULL_TREE;
10420 case PLUS_EXPR:
10421 /* A + (-B) -> A - B */
10422 if (TREE_CODE (arg1) == NEGATE_EXPR
10423 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10424 return fold_build2_loc (loc, MINUS_EXPR, type,
10425 fold_convert_loc (loc, type, arg0),
10426 fold_convert_loc (loc, type,
10427 TREE_OPERAND (arg1, 0)));
10428 /* (-A) + B -> B - A */
10429 if (TREE_CODE (arg0) == NEGATE_EXPR
10430 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10431 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10432 return fold_build2_loc (loc, MINUS_EXPR, type,
10433 fold_convert_loc (loc, type, arg1),
10434 fold_convert_loc (loc, type,
10435 TREE_OPERAND (arg0, 0)));
10437 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10439 /* Convert ~A + 1 to -A. */
10440 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10441 && integer_onep (arg1))
10442 return fold_build1_loc (loc, NEGATE_EXPR, type,
10443 fold_convert_loc (loc, type,
10444 TREE_OPERAND (arg0, 0)));
10446 /* ~X + X is -1. */
10447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10448 && !TYPE_OVERFLOW_TRAPS (type))
10450 tree tem = TREE_OPERAND (arg0, 0);
10452 STRIP_NOPS (tem);
10453 if (operand_equal_p (tem, arg1, 0))
10455 t1 = build_all_ones_cst (type);
10456 return omit_one_operand_loc (loc, type, t1, arg1);
10460 /* X + ~X is -1. */
10461 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10462 && !TYPE_OVERFLOW_TRAPS (type))
10464 tree tem = TREE_OPERAND (arg1, 0);
10466 STRIP_NOPS (tem);
10467 if (operand_equal_p (arg0, tem, 0))
10469 t1 = build_all_ones_cst (type);
10470 return omit_one_operand_loc (loc, type, t1, arg0);
10474 /* X + (X / CST) * -CST is X % CST. */
10475 if (TREE_CODE (arg1) == MULT_EXPR
10476 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10477 && operand_equal_p (arg0,
10478 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10480 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10481 tree cst1 = TREE_OPERAND (arg1, 1);
10482 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10483 cst1, cst0);
10484 if (sum && integer_zerop (sum))
10485 return fold_convert_loc (loc, type,
10486 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10487 TREE_TYPE (arg0), arg0,
10488 cst0));
10492 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10493 one. Make sure the type is not saturating and has the signedness of
10494 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10495 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10496 if ((TREE_CODE (arg0) == MULT_EXPR
10497 || TREE_CODE (arg1) == MULT_EXPR)
10498 && !TYPE_SATURATING (type)
10499 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10500 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10501 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10503 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10504 if (tem)
10505 return tem;
10508 if (! FLOAT_TYPE_P (type))
10510 if (integer_zerop (arg1))
10511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10513 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10514 with a constant, and the two constants have no bits in common,
10515 we should treat this as a BIT_IOR_EXPR since this may produce more
10516 simplifications. */
10517 if (TREE_CODE (arg0) == BIT_AND_EXPR
10518 && TREE_CODE (arg1) == BIT_AND_EXPR
10519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10520 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10521 && integer_zerop (const_binop (BIT_AND_EXPR,
10522 TREE_OPERAND (arg0, 1),
10523 TREE_OPERAND (arg1, 1))))
10525 code = BIT_IOR_EXPR;
10526 goto bit_ior;
10529 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10530 (plus (plus (mult) (mult)) (foo)) so that we can
10531 take advantage of the factoring cases below. */
10532 if (TYPE_OVERFLOW_WRAPS (type)
10533 && (((TREE_CODE (arg0) == PLUS_EXPR
10534 || TREE_CODE (arg0) == MINUS_EXPR)
10535 && TREE_CODE (arg1) == MULT_EXPR)
10536 || ((TREE_CODE (arg1) == PLUS_EXPR
10537 || TREE_CODE (arg1) == MINUS_EXPR)
10538 && TREE_CODE (arg0) == MULT_EXPR)))
10540 tree parg0, parg1, parg, marg;
10541 enum tree_code pcode;
10543 if (TREE_CODE (arg1) == MULT_EXPR)
10544 parg = arg0, marg = arg1;
10545 else
10546 parg = arg1, marg = arg0;
10547 pcode = TREE_CODE (parg);
10548 parg0 = TREE_OPERAND (parg, 0);
10549 parg1 = TREE_OPERAND (parg, 1);
10550 STRIP_NOPS (parg0);
10551 STRIP_NOPS (parg1);
10553 if (TREE_CODE (parg0) == MULT_EXPR
10554 && TREE_CODE (parg1) != MULT_EXPR)
10555 return fold_build2_loc (loc, pcode, type,
10556 fold_build2_loc (loc, PLUS_EXPR, type,
10557 fold_convert_loc (loc, type,
10558 parg0),
10559 fold_convert_loc (loc, type,
10560 marg)),
10561 fold_convert_loc (loc, type, parg1));
10562 if (TREE_CODE (parg0) != MULT_EXPR
10563 && TREE_CODE (parg1) == MULT_EXPR)
10564 return
10565 fold_build2_loc (loc, PLUS_EXPR, type,
10566 fold_convert_loc (loc, type, parg0),
10567 fold_build2_loc (loc, pcode, type,
10568 fold_convert_loc (loc, type, marg),
10569 fold_convert_loc (loc, type,
10570 parg1)));
10573 else
10575 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10576 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10579 /* Likewise if the operands are reversed. */
10580 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10581 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10583 /* Convert X + -C into X - C. */
10584 if (TREE_CODE (arg1) == REAL_CST
10585 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10587 tem = fold_negate_const (arg1, type);
10588 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10589 return fold_build2_loc (loc, MINUS_EXPR, type,
10590 fold_convert_loc (loc, type, arg0),
10591 fold_convert_loc (loc, type, tem));
10594 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10595 to __complex__ ( x, y ). This is not the same for SNaNs or
10596 if signed zeros are involved. */
10597 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10601 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10602 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10603 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10604 bool arg0rz = false, arg0iz = false;
10605 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10606 || (arg0i && (arg0iz = real_zerop (arg0i))))
10608 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10609 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10610 if (arg0rz && arg1i && real_zerop (arg1i))
10612 tree rp = arg1r ? arg1r
10613 : build1 (REALPART_EXPR, rtype, arg1);
10614 tree ip = arg0i ? arg0i
10615 : build1 (IMAGPART_EXPR, rtype, arg0);
10616 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10618 else if (arg0iz && arg1r && real_zerop (arg1r))
10620 tree rp = arg0r ? arg0r
10621 : build1 (REALPART_EXPR, rtype, arg0);
10622 tree ip = arg1i ? arg1i
10623 : build1 (IMAGPART_EXPR, rtype, arg1);
10624 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10629 if (flag_unsafe_math_optimizations
10630 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10631 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10632 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10633 return tem;
10635 /* Convert x+x into x*2.0. */
10636 if (operand_equal_p (arg0, arg1, 0)
10637 && SCALAR_FLOAT_TYPE_P (type))
10638 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10639 build_real (type, dconst2));
10641 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10642 We associate floats only if the user has specified
10643 -fassociative-math. */
10644 if (flag_associative_math
10645 && TREE_CODE (arg1) == PLUS_EXPR
10646 && TREE_CODE (arg0) != MULT_EXPR)
10648 tree tree10 = TREE_OPERAND (arg1, 0);
10649 tree tree11 = TREE_OPERAND (arg1, 1);
10650 if (TREE_CODE (tree11) == MULT_EXPR
10651 && TREE_CODE (tree10) == MULT_EXPR)
10653 tree tree0;
10654 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10655 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10658 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10659 We associate floats only if the user has specified
10660 -fassociative-math. */
10661 if (flag_associative_math
10662 && TREE_CODE (arg0) == PLUS_EXPR
10663 && TREE_CODE (arg1) != MULT_EXPR)
10665 tree tree00 = TREE_OPERAND (arg0, 0);
10666 tree tree01 = TREE_OPERAND (arg0, 1);
10667 if (TREE_CODE (tree01) == MULT_EXPR
10668 && TREE_CODE (tree00) == MULT_EXPR)
10670 tree tree0;
10671 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10672 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10677 bit_rotate:
10678 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10679 is a rotate of A by C1 bits. */
10680 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10681 is a rotate of A by B bits. */
10683 enum tree_code code0, code1;
10684 tree rtype;
10685 code0 = TREE_CODE (arg0);
10686 code1 = TREE_CODE (arg1);
10687 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10688 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10689 && operand_equal_p (TREE_OPERAND (arg0, 0),
10690 TREE_OPERAND (arg1, 0), 0)
10691 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10692 TYPE_UNSIGNED (rtype))
10693 /* Only create rotates in complete modes. Other cases are not
10694 expanded properly. */
10695 && (element_precision (rtype)
10696 == element_precision (TYPE_MODE (rtype))))
10698 tree tree01, tree11;
10699 enum tree_code code01, code11;
10701 tree01 = TREE_OPERAND (arg0, 1);
10702 tree11 = TREE_OPERAND (arg1, 1);
10703 STRIP_NOPS (tree01);
10704 STRIP_NOPS (tree11);
10705 code01 = TREE_CODE (tree01);
10706 code11 = TREE_CODE (tree11);
10707 if (code01 == INTEGER_CST
10708 && code11 == INTEGER_CST
10709 && TREE_INT_CST_HIGH (tree01) == 0
10710 && TREE_INT_CST_HIGH (tree11) == 0
10711 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10712 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10714 tem = build2_loc (loc, LROTATE_EXPR,
10715 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10716 TREE_OPERAND (arg0, 0),
10717 code0 == LSHIFT_EXPR ? tree01 : tree11);
10718 return fold_convert_loc (loc, type, tem);
10720 else if (code11 == MINUS_EXPR)
10722 tree tree110, tree111;
10723 tree110 = TREE_OPERAND (tree11, 0);
10724 tree111 = TREE_OPERAND (tree11, 1);
10725 STRIP_NOPS (tree110);
10726 STRIP_NOPS (tree111);
10727 if (TREE_CODE (tree110) == INTEGER_CST
10728 && 0 == compare_tree_int (tree110,
10729 element_precision
10730 (TREE_TYPE (TREE_OPERAND
10731 (arg0, 0))))
10732 && operand_equal_p (tree01, tree111, 0))
10733 return
10734 fold_convert_loc (loc, type,
10735 build2 ((code0 == LSHIFT_EXPR
10736 ? LROTATE_EXPR
10737 : RROTATE_EXPR),
10738 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10739 TREE_OPERAND (arg0, 0), tree01));
10741 else if (code01 == MINUS_EXPR)
10743 tree tree010, tree011;
10744 tree010 = TREE_OPERAND (tree01, 0);
10745 tree011 = TREE_OPERAND (tree01, 1);
10746 STRIP_NOPS (tree010);
10747 STRIP_NOPS (tree011);
10748 if (TREE_CODE (tree010) == INTEGER_CST
10749 && 0 == compare_tree_int (tree010,
10750 element_precision
10751 (TREE_TYPE (TREE_OPERAND
10752 (arg0, 0))))
10753 && operand_equal_p (tree11, tree011, 0))
10754 return fold_convert_loc
10755 (loc, type,
10756 build2 ((code0 != LSHIFT_EXPR
10757 ? LROTATE_EXPR
10758 : RROTATE_EXPR),
10759 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10760 TREE_OPERAND (arg0, 0), tree11));
10765 associate:
10766 /* In most languages, can't associate operations on floats through
10767 parentheses. Rather than remember where the parentheses were, we
10768 don't associate floats at all, unless the user has specified
10769 -fassociative-math.
10770 And, we need to make sure type is not saturating. */
10772 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10773 && !TYPE_SATURATING (type))
10775 tree var0, con0, lit0, minus_lit0;
10776 tree var1, con1, lit1, minus_lit1;
10777 tree atype = type;
10778 bool ok = true;
10780 /* Split both trees into variables, constants, and literals. Then
10781 associate each group together, the constants with literals,
10782 then the result with variables. This increases the chances of
10783 literals being recombined later and of generating relocatable
10784 expressions for the sum of a constant and literal. */
10785 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10786 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10787 code == MINUS_EXPR);
10789 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10790 if (code == MINUS_EXPR)
10791 code = PLUS_EXPR;
10793 /* With undefined overflow prefer doing association in a type
10794 which wraps on overflow, if that is one of the operand types. */
10795 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10796 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10798 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10799 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10800 atype = TREE_TYPE (arg0);
10801 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10802 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10803 atype = TREE_TYPE (arg1);
10804 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10807 /* With undefined overflow we can only associate constants with one
10808 variable, and constants whose association doesn't overflow. */
10809 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10810 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10812 if (var0 && var1)
10814 tree tmp0 = var0;
10815 tree tmp1 = var1;
10817 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10818 tmp0 = TREE_OPERAND (tmp0, 0);
10819 if (CONVERT_EXPR_P (tmp0)
10820 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10821 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10822 <= TYPE_PRECISION (atype)))
10823 tmp0 = TREE_OPERAND (tmp0, 0);
10824 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10825 tmp1 = TREE_OPERAND (tmp1, 0);
10826 if (CONVERT_EXPR_P (tmp1)
10827 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10828 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10829 <= TYPE_PRECISION (atype)))
10830 tmp1 = TREE_OPERAND (tmp1, 0);
10831 /* The only case we can still associate with two variables
10832 is if they are the same, modulo negation and bit-pattern
10833 preserving conversions. */
10834 if (!operand_equal_p (tmp0, tmp1, 0))
10835 ok = false;
10839 /* Only do something if we found more than two objects. Otherwise,
10840 nothing has changed and we risk infinite recursion. */
10841 if (ok
10842 && (2 < ((var0 != 0) + (var1 != 0)
10843 + (con0 != 0) + (con1 != 0)
10844 + (lit0 != 0) + (lit1 != 0)
10845 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10847 bool any_overflows = false;
10848 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10849 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10850 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10851 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10852 var0 = associate_trees (loc, var0, var1, code, atype);
10853 con0 = associate_trees (loc, con0, con1, code, atype);
10854 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10855 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10856 code, atype);
10858 /* Preserve the MINUS_EXPR if the negative part of the literal is
10859 greater than the positive part. Otherwise, the multiplicative
10860 folding code (i.e extract_muldiv) may be fooled in case
10861 unsigned constants are subtracted, like in the following
10862 example: ((X*2 + 4) - 8U)/2. */
10863 if (minus_lit0 && lit0)
10865 if (TREE_CODE (lit0) == INTEGER_CST
10866 && TREE_CODE (minus_lit0) == INTEGER_CST
10867 && tree_int_cst_lt (lit0, minus_lit0))
10869 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10870 MINUS_EXPR, atype);
10871 lit0 = 0;
10873 else
10875 lit0 = associate_trees (loc, lit0, minus_lit0,
10876 MINUS_EXPR, atype);
10877 minus_lit0 = 0;
10881 /* Don't introduce overflows through reassociation. */
10882 if (!any_overflows
10883 && ((lit0 && TREE_OVERFLOW (lit0))
10884 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10885 return NULL_TREE;
10887 if (minus_lit0)
10889 if (con0 == 0)
10890 return
10891 fold_convert_loc (loc, type,
10892 associate_trees (loc, var0, minus_lit0,
10893 MINUS_EXPR, atype));
10894 else
10896 con0 = associate_trees (loc, con0, minus_lit0,
10897 MINUS_EXPR, atype);
10898 return
10899 fold_convert_loc (loc, type,
10900 associate_trees (loc, var0, con0,
10901 PLUS_EXPR, atype));
10905 con0 = associate_trees (loc, con0, lit0, code, atype);
10906 return
10907 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10908 code, atype));
10912 return NULL_TREE;
10914 case MINUS_EXPR:
10915 /* Pointer simplifications for subtraction, simple reassociations. */
10916 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10918 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10919 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10920 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10922 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10923 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10924 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10925 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10926 return fold_build2_loc (loc, PLUS_EXPR, type,
10927 fold_build2_loc (loc, MINUS_EXPR, type,
10928 arg00, arg10),
10929 fold_build2_loc (loc, MINUS_EXPR, type,
10930 arg01, arg11));
10932 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10933 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10935 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10936 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10937 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10938 fold_convert_loc (loc, type, arg1));
10939 if (tmp)
10940 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10943 /* A - (-B) -> A + B */
10944 if (TREE_CODE (arg1) == NEGATE_EXPR)
10945 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10946 fold_convert_loc (loc, type,
10947 TREE_OPERAND (arg1, 0)));
10948 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10949 if (TREE_CODE (arg0) == NEGATE_EXPR
10950 && negate_expr_p (arg1)
10951 && reorder_operands_p (arg0, arg1))
10952 return fold_build2_loc (loc, MINUS_EXPR, type,
10953 fold_convert_loc (loc, type,
10954 negate_expr (arg1)),
10955 fold_convert_loc (loc, type,
10956 TREE_OPERAND (arg0, 0)));
10957 /* Convert -A - 1 to ~A. */
10958 if (TREE_CODE (type) != COMPLEX_TYPE
10959 && TREE_CODE (arg0) == NEGATE_EXPR
10960 && integer_onep (arg1)
10961 && !TYPE_OVERFLOW_TRAPS (type))
10962 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10963 fold_convert_loc (loc, type,
10964 TREE_OPERAND (arg0, 0)));
10966 /* Convert -1 - A to ~A. */
10967 if (TREE_CODE (type) != COMPLEX_TYPE
10968 && integer_all_onesp (arg0))
10969 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10972 /* X - (X / Y) * Y is X % Y. */
10973 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10974 && TREE_CODE (arg1) == MULT_EXPR
10975 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10976 && operand_equal_p (arg0,
10977 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10978 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10979 TREE_OPERAND (arg1, 1), 0))
10980 return
10981 fold_convert_loc (loc, type,
10982 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10983 arg0, TREE_OPERAND (arg1, 1)));
10985 if (! FLOAT_TYPE_P (type))
10987 if (integer_zerop (arg0))
10988 return negate_expr (fold_convert_loc (loc, type, arg1));
10989 if (integer_zerop (arg1))
10990 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10992 /* Fold A - (A & B) into ~B & A. */
10993 if (!TREE_SIDE_EFFECTS (arg0)
10994 && TREE_CODE (arg1) == BIT_AND_EXPR)
10996 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10998 tree arg10 = fold_convert_loc (loc, type,
10999 TREE_OPERAND (arg1, 0));
11000 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11001 fold_build1_loc (loc, BIT_NOT_EXPR,
11002 type, arg10),
11003 fold_convert_loc (loc, type, arg0));
11005 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11007 tree arg11 = fold_convert_loc (loc,
11008 type, TREE_OPERAND (arg1, 1));
11009 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11010 fold_build1_loc (loc, BIT_NOT_EXPR,
11011 type, arg11),
11012 fold_convert_loc (loc, type, arg0));
11016 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
11017 any power of 2 minus 1. */
11018 if (TREE_CODE (arg0) == BIT_AND_EXPR
11019 && TREE_CODE (arg1) == BIT_AND_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0, 0),
11021 TREE_OPERAND (arg1, 0), 0))
11023 tree mask0 = TREE_OPERAND (arg0, 1);
11024 tree mask1 = TREE_OPERAND (arg1, 1);
11025 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
11027 if (operand_equal_p (tem, mask1, 0))
11029 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
11030 TREE_OPERAND (arg0, 0), mask1);
11031 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
11036 /* See if ARG1 is zero and X - ARG1 reduces to X. */
11037 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
11038 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11040 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
11041 ARG0 is zero and X + ARG0 reduces to X, since that would mean
11042 (-ARG1 + ARG0) reduces to -ARG1. */
11043 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
11044 return negate_expr (fold_convert_loc (loc, type, arg1));
11046 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11047 __complex__ ( x, -y ). This is not the same for SNaNs or if
11048 signed zeros are involved. */
11049 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11051 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11053 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11054 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11055 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11056 bool arg0rz = false, arg0iz = false;
11057 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11058 || (arg0i && (arg0iz = real_zerop (arg0i))))
11060 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11061 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11062 if (arg0rz && arg1i && real_zerop (arg1i))
11064 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11065 arg1r ? arg1r
11066 : build1 (REALPART_EXPR, rtype, arg1));
11067 tree ip = arg0i ? arg0i
11068 : build1 (IMAGPART_EXPR, rtype, arg0);
11069 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11071 else if (arg0iz && arg1r && real_zerop (arg1r))
11073 tree rp = arg0r ? arg0r
11074 : build1 (REALPART_EXPR, rtype, arg0);
11075 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11076 arg1i ? arg1i
11077 : build1 (IMAGPART_EXPR, rtype, arg1));
11078 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11083 /* Fold &x - &x. This can happen from &x.foo - &x.
11084 This is unsafe for certain floats even in non-IEEE formats.
11085 In IEEE, it is unsafe because it does wrong for NaNs.
11086 Also note that operand_equal_p is always false if an operand
11087 is volatile. */
11089 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11090 && operand_equal_p (arg0, arg1, 0))
11091 return build_zero_cst (type);
11093 /* A - B -> A + (-B) if B is easily negatable. */
11094 if (negate_expr_p (arg1)
11095 && ((FLOAT_TYPE_P (type)
11096 /* Avoid this transformation if B is a positive REAL_CST. */
11097 && (TREE_CODE (arg1) != REAL_CST
11098 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11099 || INTEGRAL_TYPE_P (type)))
11100 return fold_build2_loc (loc, PLUS_EXPR, type,
11101 fold_convert_loc (loc, type, arg0),
11102 fold_convert_loc (loc, type,
11103 negate_expr (arg1)));
11105 /* Try folding difference of addresses. */
11107 HOST_WIDE_INT diff;
11109 if ((TREE_CODE (arg0) == ADDR_EXPR
11110 || TREE_CODE (arg1) == ADDR_EXPR)
11111 && ptr_difference_const (arg0, arg1, &diff))
11112 return build_int_cst_type (type, diff);
11115 /* Fold &a[i] - &a[j] to i-j. */
11116 if (TREE_CODE (arg0) == ADDR_EXPR
11117 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11118 && TREE_CODE (arg1) == ADDR_EXPR
11119 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11121 tree tem = fold_addr_of_array_ref_difference (loc, type,
11122 TREE_OPERAND (arg0, 0),
11123 TREE_OPERAND (arg1, 0));
11124 if (tem)
11125 return tem;
11128 if (FLOAT_TYPE_P (type)
11129 && flag_unsafe_math_optimizations
11130 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11131 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11132 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11133 return tem;
11135 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11136 one. Make sure the type is not saturating and has the signedness of
11137 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11138 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11139 if ((TREE_CODE (arg0) == MULT_EXPR
11140 || TREE_CODE (arg1) == MULT_EXPR)
11141 && !TYPE_SATURATING (type)
11142 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11143 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11144 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11146 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11147 if (tem)
11148 return tem;
11151 goto associate;
11153 case MULT_EXPR:
11154 /* (-A) * (-B) -> A * B */
11155 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11156 return fold_build2_loc (loc, MULT_EXPR, type,
11157 fold_convert_loc (loc, type,
11158 TREE_OPERAND (arg0, 0)),
11159 fold_convert_loc (loc, type,
11160 negate_expr (arg1)));
11161 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11162 return fold_build2_loc (loc, MULT_EXPR, type,
11163 fold_convert_loc (loc, type,
11164 negate_expr (arg0)),
11165 fold_convert_loc (loc, type,
11166 TREE_OPERAND (arg1, 0)));
11168 if (! FLOAT_TYPE_P (type))
11170 if (integer_zerop (arg1))
11171 return omit_one_operand_loc (loc, type, arg1, arg0);
11172 if (integer_onep (arg1))
11173 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11174 /* Transform x * -1 into -x. Make sure to do the negation
11175 on the original operand with conversions not stripped
11176 because we can only strip non-sign-changing conversions. */
11177 if (integer_minus_onep (arg1))
11178 return fold_convert_loc (loc, type, negate_expr (op0));
11179 /* Transform x * -C into -x * C if x is easily negatable. */
11180 if (TREE_CODE (arg1) == INTEGER_CST
11181 && tree_int_cst_sgn (arg1) == -1
11182 && negate_expr_p (arg0)
11183 && (tem = negate_expr (arg1)) != arg1
11184 && !TREE_OVERFLOW (tem))
11185 return fold_build2_loc (loc, MULT_EXPR, type,
11186 fold_convert_loc (loc, type,
11187 negate_expr (arg0)),
11188 tem);
11190 /* (a * (1 << b)) is (a << b) */
11191 if (TREE_CODE (arg1) == LSHIFT_EXPR
11192 && integer_onep (TREE_OPERAND (arg1, 0)))
11193 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11194 TREE_OPERAND (arg1, 1));
11195 if (TREE_CODE (arg0) == LSHIFT_EXPR
11196 && integer_onep (TREE_OPERAND (arg0, 0)))
11197 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11198 TREE_OPERAND (arg0, 1));
11200 /* (A + A) * C -> A * 2 * C */
11201 if (TREE_CODE (arg0) == PLUS_EXPR
11202 && TREE_CODE (arg1) == INTEGER_CST
11203 && operand_equal_p (TREE_OPERAND (arg0, 0),
11204 TREE_OPERAND (arg0, 1), 0))
11205 return fold_build2_loc (loc, MULT_EXPR, type,
11206 omit_one_operand_loc (loc, type,
11207 TREE_OPERAND (arg0, 0),
11208 TREE_OPERAND (arg0, 1)),
11209 fold_build2_loc (loc, MULT_EXPR, type,
11210 build_int_cst (type, 2) , arg1));
11212 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11213 sign-changing only. */
11214 if (TREE_CODE (arg1) == INTEGER_CST
11215 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11216 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11217 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11219 strict_overflow_p = false;
11220 if (TREE_CODE (arg1) == INTEGER_CST
11221 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11222 &strict_overflow_p)))
11224 if (strict_overflow_p)
11225 fold_overflow_warning (("assuming signed overflow does not "
11226 "occur when simplifying "
11227 "multiplication"),
11228 WARN_STRICT_OVERFLOW_MISC);
11229 return fold_convert_loc (loc, type, tem);
11232 /* Optimize z * conj(z) for integer complex numbers. */
11233 if (TREE_CODE (arg0) == CONJ_EXPR
11234 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11235 return fold_mult_zconjz (loc, type, arg1);
11236 if (TREE_CODE (arg1) == CONJ_EXPR
11237 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11238 return fold_mult_zconjz (loc, type, arg0);
11240 else
11242 /* Maybe fold x * 0 to 0. The expressions aren't the same
11243 when x is NaN, since x * 0 is also NaN. Nor are they the
11244 same in modes with signed zeros, since multiplying a
11245 negative value by 0 gives -0, not +0. */
11246 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11247 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11248 && real_zerop (arg1))
11249 return omit_one_operand_loc (loc, type, arg1, arg0);
11250 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11251 Likewise for complex arithmetic with signed zeros. */
11252 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11253 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11254 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11255 && real_onep (arg1))
11256 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11258 /* Transform x * -1.0 into -x. */
11259 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11260 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11261 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11262 && real_minus_onep (arg1))
11263 return fold_convert_loc (loc, type, negate_expr (arg0));
11265 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11266 the result for floating point types due to rounding so it is applied
11267 only if -fassociative-math was specify. */
11268 if (flag_associative_math
11269 && TREE_CODE (arg0) == RDIV_EXPR
11270 && TREE_CODE (arg1) == REAL_CST
11271 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11273 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11274 arg1);
11275 if (tem)
11276 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11277 TREE_OPERAND (arg0, 1));
11280 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11281 if (operand_equal_p (arg0, arg1, 0))
11283 tree tem = fold_strip_sign_ops (arg0);
11284 if (tem != NULL_TREE)
11286 tem = fold_convert_loc (loc, type, tem);
11287 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11291 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11292 This is not the same for NaNs or if signed zeros are
11293 involved. */
11294 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11295 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11296 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11297 && TREE_CODE (arg1) == COMPLEX_CST
11298 && real_zerop (TREE_REALPART (arg1)))
11300 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11301 if (real_onep (TREE_IMAGPART (arg1)))
11302 return
11303 fold_build2_loc (loc, COMPLEX_EXPR, type,
11304 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11305 rtype, arg0)),
11306 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11307 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11308 return
11309 fold_build2_loc (loc, COMPLEX_EXPR, type,
11310 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11311 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11312 rtype, arg0)));
11315 /* Optimize z * conj(z) for floating point complex numbers.
11316 Guarded by flag_unsafe_math_optimizations as non-finite
11317 imaginary components don't produce scalar results. */
11318 if (flag_unsafe_math_optimizations
11319 && TREE_CODE (arg0) == CONJ_EXPR
11320 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11321 return fold_mult_zconjz (loc, type, arg1);
11322 if (flag_unsafe_math_optimizations
11323 && TREE_CODE (arg1) == CONJ_EXPR
11324 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11325 return fold_mult_zconjz (loc, type, arg0);
11327 if (flag_unsafe_math_optimizations)
11329 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11330 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11332 /* Optimizations of root(...)*root(...). */
11333 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11335 tree rootfn, arg;
11336 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11337 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11339 /* Optimize sqrt(x)*sqrt(x) as x. */
11340 if (BUILTIN_SQRT_P (fcode0)
11341 && operand_equal_p (arg00, arg10, 0)
11342 && ! HONOR_SNANS (TYPE_MODE (type)))
11343 return arg00;
11345 /* Optimize root(x)*root(y) as root(x*y). */
11346 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11347 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11348 return build_call_expr_loc (loc, rootfn, 1, arg);
11351 /* Optimize expN(x)*expN(y) as expN(x+y). */
11352 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11354 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11355 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11356 CALL_EXPR_ARG (arg0, 0),
11357 CALL_EXPR_ARG (arg1, 0));
11358 return build_call_expr_loc (loc, expfn, 1, arg);
11361 /* Optimizations of pow(...)*pow(...). */
11362 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11363 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11364 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11366 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11367 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11368 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11369 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11371 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11372 if (operand_equal_p (arg01, arg11, 0))
11374 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11375 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11376 arg00, arg10);
11377 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11380 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11381 if (operand_equal_p (arg00, arg10, 0))
11383 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11384 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11385 arg01, arg11);
11386 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11390 /* Optimize tan(x)*cos(x) as sin(x). */
11391 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11392 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11393 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11394 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11395 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11396 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11397 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11398 CALL_EXPR_ARG (arg1, 0), 0))
11400 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11402 if (sinfn != NULL_TREE)
11403 return build_call_expr_loc (loc, sinfn, 1,
11404 CALL_EXPR_ARG (arg0, 0));
11407 /* Optimize x*pow(x,c) as pow(x,c+1). */
11408 if (fcode1 == BUILT_IN_POW
11409 || fcode1 == BUILT_IN_POWF
11410 || fcode1 == BUILT_IN_POWL)
11412 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11413 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11414 if (TREE_CODE (arg11) == REAL_CST
11415 && !TREE_OVERFLOW (arg11)
11416 && operand_equal_p (arg0, arg10, 0))
11418 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11419 REAL_VALUE_TYPE c;
11420 tree arg;
11422 c = TREE_REAL_CST (arg11);
11423 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11424 arg = build_real (type, c);
11425 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11429 /* Optimize pow(x,c)*x as pow(x,c+1). */
11430 if (fcode0 == BUILT_IN_POW
11431 || fcode0 == BUILT_IN_POWF
11432 || fcode0 == BUILT_IN_POWL)
11434 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11435 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11436 if (TREE_CODE (arg01) == REAL_CST
11437 && !TREE_OVERFLOW (arg01)
11438 && operand_equal_p (arg1, arg00, 0))
11440 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11441 REAL_VALUE_TYPE c;
11442 tree arg;
11444 c = TREE_REAL_CST (arg01);
11445 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11446 arg = build_real (type, c);
11447 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11451 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11452 if (!in_gimple_form
11453 && optimize
11454 && operand_equal_p (arg0, arg1, 0))
11456 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11458 if (powfn)
11460 tree arg = build_real (type, dconst2);
11461 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11466 goto associate;
11468 case BIT_IOR_EXPR:
11469 bit_ior:
11470 if (integer_all_onesp (arg1))
11471 return omit_one_operand_loc (loc, type, arg1, arg0);
11472 if (integer_zerop (arg1))
11473 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11474 if (operand_equal_p (arg0, arg1, 0))
11475 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11477 /* ~X | X is -1. */
11478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11481 t1 = build_zero_cst (type);
11482 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11483 return omit_one_operand_loc (loc, type, t1, arg1);
11486 /* X | ~X is -1. */
11487 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11490 t1 = build_zero_cst (type);
11491 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11492 return omit_one_operand_loc (loc, type, t1, arg0);
11495 /* Canonicalize (X & C1) | C2. */
11496 if (TREE_CODE (arg0) == BIT_AND_EXPR
11497 && TREE_CODE (arg1) == INTEGER_CST
11498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11500 double_int c1, c2, c3, msk;
11501 int width = TYPE_PRECISION (type), w;
11502 bool try_simplify = true;
11504 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11505 c2 = tree_to_double_int (arg1);
11507 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11508 if ((c1 & c2) == c1)
11509 return omit_one_operand_loc (loc, type, arg1,
11510 TREE_OPERAND (arg0, 0));
11512 msk = double_int::mask (width);
11514 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11515 if (msk.and_not (c1 | c2).is_zero ())
11516 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11517 TREE_OPERAND (arg0, 0), arg1);
11519 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11520 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11521 mode which allows further optimizations. */
11522 c1 &= msk;
11523 c2 &= msk;
11524 c3 = c1.and_not (c2);
11525 for (w = BITS_PER_UNIT;
11526 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11527 w <<= 1)
11529 unsigned HOST_WIDE_INT mask
11530 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11531 if (((c1.low | c2.low) & mask) == mask
11532 && (c1.low & ~mask) == 0 && c1.high == 0)
11534 c3 = double_int::from_uhwi (mask);
11535 break;
11539 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11540 with that optimization from the BIT_AND_EXPR optimizations.
11541 This could end up in an infinite recursion. */
11542 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11543 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11544 == INTEGER_CST)
11546 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11547 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11549 try_simplify = (masked != c1);
11552 if (try_simplify && c3 != c1)
11553 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11554 fold_build2_loc (loc, BIT_AND_EXPR, type,
11555 TREE_OPERAND (arg0, 0),
11556 double_int_to_tree (type,
11557 c3)),
11558 arg1);
11561 /* (X & Y) | Y is (X, Y). */
11562 if (TREE_CODE (arg0) == BIT_AND_EXPR
11563 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11564 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11565 /* (X & Y) | X is (Y, X). */
11566 if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11568 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11569 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11570 /* X | (X & Y) is (Y, X). */
11571 if (TREE_CODE (arg1) == BIT_AND_EXPR
11572 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11573 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11574 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11575 /* X | (Y & X) is (Y, X). */
11576 if (TREE_CODE (arg1) == BIT_AND_EXPR
11577 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11578 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11579 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11581 /* (X & ~Y) | (~X & Y) is X ^ Y */
11582 if (TREE_CODE (arg0) == BIT_AND_EXPR
11583 && TREE_CODE (arg1) == BIT_AND_EXPR)
11585 tree a0, a1, l0, l1, n0, n1;
11587 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11588 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11590 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11591 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11593 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11594 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11596 if ((operand_equal_p (n0, a0, 0)
11597 && operand_equal_p (n1, a1, 0))
11598 || (operand_equal_p (n0, a1, 0)
11599 && operand_equal_p (n1, a0, 0)))
11600 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11603 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11604 if (t1 != NULL_TREE)
11605 return t1;
11607 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11609 This results in more efficient code for machines without a NAND
11610 instruction. Combine will canonicalize to the first form
11611 which will allow use of NAND instructions provided by the
11612 backend if they exist. */
11613 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11614 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11616 return
11617 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11618 build2 (BIT_AND_EXPR, type,
11619 fold_convert_loc (loc, type,
11620 TREE_OPERAND (arg0, 0)),
11621 fold_convert_loc (loc, type,
11622 TREE_OPERAND (arg1, 0))));
11625 /* See if this can be simplified into a rotate first. If that
11626 is unsuccessful continue in the association code. */
11627 goto bit_rotate;
11629 case BIT_XOR_EXPR:
11630 if (integer_zerop (arg1))
11631 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11632 if (integer_all_onesp (arg1))
11633 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11634 if (operand_equal_p (arg0, arg1, 0))
11635 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11637 /* ~X ^ X is -1. */
11638 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11639 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11641 t1 = build_zero_cst (type);
11642 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11643 return omit_one_operand_loc (loc, type, t1, arg1);
11646 /* X ^ ~X is -1. */
11647 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11650 t1 = build_zero_cst (type);
11651 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11652 return omit_one_operand_loc (loc, type, t1, arg0);
11655 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11656 with a constant, and the two constants have no bits in common,
11657 we should treat this as a BIT_IOR_EXPR since this may produce more
11658 simplifications. */
11659 if (TREE_CODE (arg0) == BIT_AND_EXPR
11660 && TREE_CODE (arg1) == BIT_AND_EXPR
11661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11662 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11663 && integer_zerop (const_binop (BIT_AND_EXPR,
11664 TREE_OPERAND (arg0, 1),
11665 TREE_OPERAND (arg1, 1))))
11667 code = BIT_IOR_EXPR;
11668 goto bit_ior;
11671 /* (X | Y) ^ X -> Y & ~ X*/
11672 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11673 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11675 tree t2 = TREE_OPERAND (arg0, 1);
11676 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11677 arg1);
11678 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11679 fold_convert_loc (loc, type, t2),
11680 fold_convert_loc (loc, type, t1));
11681 return t1;
11684 /* (Y | X) ^ X -> Y & ~ X*/
11685 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11686 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11688 tree t2 = TREE_OPERAND (arg0, 0);
11689 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11690 arg1);
11691 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11692 fold_convert_loc (loc, type, t2),
11693 fold_convert_loc (loc, type, t1));
11694 return t1;
11697 /* X ^ (X | Y) -> Y & ~ X*/
11698 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11699 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11701 tree t2 = TREE_OPERAND (arg1, 1);
11702 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11703 arg0);
11704 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11705 fold_convert_loc (loc, type, t2),
11706 fold_convert_loc (loc, type, t1));
11707 return t1;
11710 /* X ^ (Y | X) -> Y & ~ X*/
11711 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11712 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11714 tree t2 = TREE_OPERAND (arg1, 0);
11715 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11716 arg0);
11717 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11718 fold_convert_loc (loc, type, t2),
11719 fold_convert_loc (loc, type, t1));
11720 return t1;
11723 /* Convert ~X ^ ~Y to X ^ Y. */
11724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11725 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11726 return fold_build2_loc (loc, code, type,
11727 fold_convert_loc (loc, type,
11728 TREE_OPERAND (arg0, 0)),
11729 fold_convert_loc (loc, type,
11730 TREE_OPERAND (arg1, 0)));
11732 /* Convert ~X ^ C to X ^ ~C. */
11733 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11734 && TREE_CODE (arg1) == INTEGER_CST)
11735 return fold_build2_loc (loc, code, type,
11736 fold_convert_loc (loc, type,
11737 TREE_OPERAND (arg0, 0)),
11738 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11740 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11741 if (TREE_CODE (arg0) == BIT_AND_EXPR
11742 && integer_onep (TREE_OPERAND (arg0, 1))
11743 && integer_onep (arg1))
11744 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11745 build_zero_cst (TREE_TYPE (arg0)));
11747 /* Fold (X & Y) ^ Y as ~X & Y. */
11748 if (TREE_CODE (arg0) == BIT_AND_EXPR
11749 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11751 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11753 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11754 fold_convert_loc (loc, type, arg1));
11756 /* Fold (X & Y) ^ X as ~Y & X. */
11757 if (TREE_CODE (arg0) == BIT_AND_EXPR
11758 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11759 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11761 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11762 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11763 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11764 fold_convert_loc (loc, type, arg1));
11766 /* Fold X ^ (X & Y) as X & ~Y. */
11767 if (TREE_CODE (arg1) == BIT_AND_EXPR
11768 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11770 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11771 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11772 fold_convert_loc (loc, type, arg0),
11773 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11775 /* Fold X ^ (Y & X) as ~Y & X. */
11776 if (TREE_CODE (arg1) == BIT_AND_EXPR
11777 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11778 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11780 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11781 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11782 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11783 fold_convert_loc (loc, type, arg0));
11786 /* See if this can be simplified into a rotate first. If that
11787 is unsuccessful continue in the association code. */
11788 goto bit_rotate;
11790 case BIT_AND_EXPR:
11791 if (integer_all_onesp (arg1))
11792 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11793 if (integer_zerop (arg1))
11794 return omit_one_operand_loc (loc, type, arg1, arg0);
11795 if (operand_equal_p (arg0, arg1, 0))
11796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11798 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11799 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11800 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11801 || (TREE_CODE (arg0) == EQ_EXPR
11802 && integer_zerop (TREE_OPERAND (arg0, 1))))
11803 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11804 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11806 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11807 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11808 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11809 || (TREE_CODE (arg1) == EQ_EXPR
11810 && integer_zerop (TREE_OPERAND (arg1, 1))))
11811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11812 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11814 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11815 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11816 && TREE_CODE (arg1) == INTEGER_CST
11817 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11819 tree tmp1 = fold_convert_loc (loc, type, arg1);
11820 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11821 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11822 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11823 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11824 return
11825 fold_convert_loc (loc, type,
11826 fold_build2_loc (loc, BIT_IOR_EXPR,
11827 type, tmp2, tmp3));
11830 /* (X | Y) & Y is (X, Y). */
11831 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11832 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11833 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11834 /* (X | Y) & X is (Y, X). */
11835 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11836 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11837 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11838 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11839 /* X & (X | Y) is (Y, X). */
11840 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11842 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11843 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11844 /* X & (Y | X) is (Y, X). */
11845 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11846 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11847 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11848 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11850 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11851 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11852 && integer_onep (TREE_OPERAND (arg0, 1))
11853 && integer_onep (arg1))
11855 tree tem2;
11856 tem = TREE_OPERAND (arg0, 0);
11857 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11858 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11859 tem, tem2);
11860 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11861 build_zero_cst (TREE_TYPE (tem)));
11863 /* Fold ~X & 1 as (X & 1) == 0. */
11864 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11865 && integer_onep (arg1))
11867 tree tem2;
11868 tem = TREE_OPERAND (arg0, 0);
11869 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11870 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11871 tem, tem2);
11872 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11873 build_zero_cst (TREE_TYPE (tem)));
11875 /* Fold !X & 1 as X == 0. */
11876 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11877 && integer_onep (arg1))
11879 tem = TREE_OPERAND (arg0, 0);
11880 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11881 build_zero_cst (TREE_TYPE (tem)));
11884 /* Fold (X ^ Y) & Y as ~X & Y. */
11885 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11886 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11888 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11889 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11890 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11891 fold_convert_loc (loc, type, arg1));
11893 /* Fold (X ^ Y) & X as ~Y & X. */
11894 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11895 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11896 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11898 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11899 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11900 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11901 fold_convert_loc (loc, type, arg1));
11903 /* Fold X & (X ^ Y) as X & ~Y. */
11904 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11907 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11908 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11909 fold_convert_loc (loc, type, arg0),
11910 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11912 /* Fold X & (Y ^ X) as ~Y & X. */
11913 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11914 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11915 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11917 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11918 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11919 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11920 fold_convert_loc (loc, type, arg0));
11923 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11924 multiple of 1 << CST. */
11925 if (TREE_CODE (arg1) == INTEGER_CST)
11927 double_int cst1 = tree_to_double_int (arg1);
11928 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11929 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11930 if ((cst1 & ncst1) == ncst1
11931 && multiple_of_p (type, arg0,
11932 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11933 return fold_convert_loc (loc, type, arg0);
11936 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11937 bits from CST2. */
11938 if (TREE_CODE (arg1) == INTEGER_CST
11939 && TREE_CODE (arg0) == MULT_EXPR
11940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11942 double_int masked
11943 = mask_with_tz (type, tree_to_double_int (arg1),
11944 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11946 if (masked.is_zero ())
11947 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11948 arg0, arg1);
11949 else if (masked != tree_to_double_int (arg1))
11950 return fold_build2_loc (loc, code, type, op0,
11951 double_int_to_tree (type, masked));
11954 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11955 ((A & N) + B) & M -> (A + B) & M
11956 Similarly if (N & M) == 0,
11957 ((A | N) + B) & M -> (A + B) & M
11958 and for - instead of + (or unary - instead of +)
11959 and/or ^ instead of |.
11960 If B is constant and (B & M) == 0, fold into A & M. */
11961 if (tree_fits_uhwi_p (arg1))
11963 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11964 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11965 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11966 && (TREE_CODE (arg0) == PLUS_EXPR
11967 || TREE_CODE (arg0) == MINUS_EXPR
11968 || TREE_CODE (arg0) == NEGATE_EXPR)
11969 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11970 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11972 tree pmop[2];
11973 int which = 0;
11974 unsigned HOST_WIDE_INT cst0;
11976 /* Now we know that arg0 is (C + D) or (C - D) or
11977 -C and arg1 (M) is == (1LL << cst) - 1.
11978 Store C into PMOP[0] and D into PMOP[1]. */
11979 pmop[0] = TREE_OPERAND (arg0, 0);
11980 pmop[1] = NULL;
11981 if (TREE_CODE (arg0) != NEGATE_EXPR)
11983 pmop[1] = TREE_OPERAND (arg0, 1);
11984 which = 1;
11987 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11988 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11989 & cst1) != cst1)
11990 which = -1;
11992 for (; which >= 0; which--)
11993 switch (TREE_CODE (pmop[which]))
11995 case BIT_AND_EXPR:
11996 case BIT_IOR_EXPR:
11997 case BIT_XOR_EXPR:
11998 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11999 != INTEGER_CST)
12000 break;
12001 /* tree_to_[su]hwi not used, because we don't care about
12002 the upper bits. */
12003 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
12004 cst0 &= cst1;
12005 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
12007 if (cst0 != cst1)
12008 break;
12010 else if (cst0 != 0)
12011 break;
12012 /* If C or D is of the form (A & N) where
12013 (N & M) == M, or of the form (A | N) or
12014 (A ^ N) where (N & M) == 0, replace it with A. */
12015 pmop[which] = TREE_OPERAND (pmop[which], 0);
12016 break;
12017 case INTEGER_CST:
12018 /* If C or D is a N where (N & M) == 0, it can be
12019 omitted (assumed 0). */
12020 if ((TREE_CODE (arg0) == PLUS_EXPR
12021 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
12022 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
12023 pmop[which] = NULL;
12024 break;
12025 default:
12026 break;
12029 /* Only build anything new if we optimized one or both arguments
12030 above. */
12031 if (pmop[0] != TREE_OPERAND (arg0, 0)
12032 || (TREE_CODE (arg0) != NEGATE_EXPR
12033 && pmop[1] != TREE_OPERAND (arg0, 1)))
12035 tree utype = TREE_TYPE (arg0);
12036 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
12038 /* Perform the operations in a type that has defined
12039 overflow behavior. */
12040 utype = unsigned_type_for (TREE_TYPE (arg0));
12041 if (pmop[0] != NULL)
12042 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
12043 if (pmop[1] != NULL)
12044 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
12047 if (TREE_CODE (arg0) == NEGATE_EXPR)
12048 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
12049 else if (TREE_CODE (arg0) == PLUS_EXPR)
12051 if (pmop[0] != NULL && pmop[1] != NULL)
12052 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
12053 pmop[0], pmop[1]);
12054 else if (pmop[0] != NULL)
12055 tem = pmop[0];
12056 else if (pmop[1] != NULL)
12057 tem = pmop[1];
12058 else
12059 return build_int_cst (type, 0);
12061 else if (pmop[0] == NULL)
12062 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
12063 else
12064 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
12065 pmop[0], pmop[1]);
12066 /* TEM is now the new binary +, - or unary - replacement. */
12067 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
12068 fold_convert_loc (loc, utype, arg1));
12069 return fold_convert_loc (loc, type, tem);
12074 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12075 if (t1 != NULL_TREE)
12076 return t1;
12077 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12078 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12079 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12081 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12083 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12084 && (~TREE_INT_CST_LOW (arg1)
12085 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12086 return
12087 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12090 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12092 This results in more efficient code for machines without a NOR
12093 instruction. Combine will canonicalize to the first form
12094 which will allow use of NOR instructions provided by the
12095 backend if they exist. */
12096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12097 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12099 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12100 build2 (BIT_IOR_EXPR, type,
12101 fold_convert_loc (loc, type,
12102 TREE_OPERAND (arg0, 0)),
12103 fold_convert_loc (loc, type,
12104 TREE_OPERAND (arg1, 0))));
12107 /* If arg0 is derived from the address of an object or function, we may
12108 be able to fold this expression using the object or function's
12109 alignment. */
12110 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12112 unsigned HOST_WIDE_INT modulus, residue;
12113 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12115 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12116 integer_onep (arg1));
12118 /* This works because modulus is a power of 2. If this weren't the
12119 case, we'd have to replace it by its greatest power-of-2
12120 divisor: modulus & -modulus. */
12121 if (low < modulus)
12122 return build_int_cst (type, residue & low);
12125 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12126 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12127 if the new mask might be further optimized. */
12128 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12129 || TREE_CODE (arg0) == RSHIFT_EXPR)
12130 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12131 && TREE_CODE (arg1) == INTEGER_CST
12132 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12133 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12134 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12135 < TYPE_PRECISION (TREE_TYPE (arg0))))
12137 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12138 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12139 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12140 tree shift_type = TREE_TYPE (arg0);
12142 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12143 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12144 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12145 && TYPE_PRECISION (TREE_TYPE (arg0))
12146 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12148 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12149 tree arg00 = TREE_OPERAND (arg0, 0);
12150 /* See if more bits can be proven as zero because of
12151 zero extension. */
12152 if (TREE_CODE (arg00) == NOP_EXPR
12153 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12155 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12156 if (TYPE_PRECISION (inner_type)
12157 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12158 && TYPE_PRECISION (inner_type) < prec)
12160 prec = TYPE_PRECISION (inner_type);
12161 /* See if we can shorten the right shift. */
12162 if (shiftc < prec)
12163 shift_type = inner_type;
12166 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12167 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12168 zerobits <<= prec - shiftc;
12169 /* For arithmetic shift if sign bit could be set, zerobits
12170 can contain actually sign bits, so no transformation is
12171 possible, unless MASK masks them all away. In that
12172 case the shift needs to be converted into logical shift. */
12173 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12174 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12176 if ((mask & zerobits) == 0)
12177 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12178 else
12179 zerobits = 0;
12183 /* ((X << 16) & 0xff00) is (X, 0). */
12184 if ((mask & zerobits) == mask)
12185 return omit_one_operand_loc (loc, type,
12186 build_int_cst (type, 0), arg0);
12188 newmask = mask | zerobits;
12189 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12191 /* Only do the transformation if NEWMASK is some integer
12192 mode's mask. */
12193 for (prec = BITS_PER_UNIT;
12194 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12195 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12196 break;
12197 if (prec < HOST_BITS_PER_WIDE_INT
12198 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12200 tree newmaskt;
12202 if (shift_type != TREE_TYPE (arg0))
12204 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12205 fold_convert_loc (loc, shift_type,
12206 TREE_OPERAND (arg0, 0)),
12207 TREE_OPERAND (arg0, 1));
12208 tem = fold_convert_loc (loc, type, tem);
12210 else
12211 tem = op0;
12212 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12213 if (!tree_int_cst_equal (newmaskt, arg1))
12214 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12219 goto associate;
12221 case RDIV_EXPR:
/* Floating-point division.  Every fold below either preserves IEEE
   semantics (NaNs, infinities, signaling NaNs) outright or is guarded
   by the matching HONOR_* predicate or -ffast-math sub-flag
   (flag_reciprocal_math, flag_unsafe_math_optimizations).  */
12222 /* Don't touch a floating-point divide by zero unless the mode
12223 of the constant can represent infinity. */
12224 if (TREE_CODE (arg1) == REAL_CST
12225 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12226 && real_zerop (arg1))
12227 return NULL_TREE;
12229 /* Optimize A / A to 1.0 if we don't care about
12230 NaNs or Infinities. Skip the transformation
12231 for non-real operands. */
12232 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12233 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12234 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12235 && operand_equal_p (arg0, arg1, 0))
12237 tree r = build_real (TREE_TYPE (arg0), dconst1);
12239 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12242 /* The complex version of the above A / A optimization. */
12243 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12244 && operand_equal_p (arg0, arg1, 0))
12246 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12247 if (! HONOR_NANS (TYPE_MODE (elem_type))
12248 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12250 tree r = build_real (elem_type, dconst1);
12251 /* omit_two_operands will call fold_convert for us. */
12252 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12256 /* (-A) / (-B) -> A / B */
12257 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12258 return fold_build2_loc (loc, RDIV_EXPR, type,
12259 TREE_OPERAND (arg0, 0),
12260 negate_expr (arg1));
12261 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12262 return fold_build2_loc (loc, RDIV_EXPR, type,
12263 negate_expr (arg0),
12264 TREE_OPERAND (arg1, 0));
12266 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12267 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12268 && real_onep (arg1))
12269 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12271 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12272 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12273 && real_minus_onep (arg1))
12274 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12275 negate_expr (arg0)));
12277 /* If ARG1 is a constant, we can convert this to a multiply by the
12278 reciprocal. This does not have the same rounding properties,
12279 so only do this if -freciprocal-math. We can actually
12280 always safely do it if ARG1 is a power of two, but it's hard to
12281 tell if it is or not in a portable manner. */
12282 if (optimize
12283 && (TREE_CODE (arg1) == REAL_CST
12284 || (TREE_CODE (arg1) == COMPLEX_CST
12285 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12286 || (TREE_CODE (arg1) == VECTOR_CST
12287 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12289 if (flag_reciprocal_math
12290 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12291 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12292 /* Find the reciprocal if optimizing and the result is exact.
12293 TODO: Complex reciprocal not implemented. */
12294 if (TREE_CODE (arg1) != COMPLEX_CST)
12296 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12298 if (inverse)
12299 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12302 /* Convert A/B/C to A/(B*C). */
12303 if (flag_reciprocal_math
12304 && TREE_CODE (arg0) == RDIV_EXPR)
12305 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12306 fold_build2_loc (loc, MULT_EXPR, type,
12307 TREE_OPERAND (arg0, 1), arg1));
12309 /* Convert A/(B/C) to (A/B)*C. */
12310 if (flag_reciprocal_math
12311 && TREE_CODE (arg1) == RDIV_EXPR)
12312 return fold_build2_loc (loc, MULT_EXPR, type,
12313 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12314 TREE_OPERAND (arg1, 0)),
12315 TREE_OPERAND (arg1, 1));
12317 /* Convert C1/(X*C2) into (C1/C2)/X. */
12318 if (flag_reciprocal_math
12319 && TREE_CODE (arg1) == MULT_EXPR
12320 && TREE_CODE (arg0) == REAL_CST
12321 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12323 tree tem = const_binop (RDIV_EXPR, arg0,
12324 TREE_OPERAND (arg1, 1));
12325 if (tem)
12326 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12327 TREE_OPERAND (arg1, 0));
/* The math-builtin simplifications below (sin/cos -> tan, x/expN(y),
   x/pow(y,z), a/root(b/c), pow(x,c)/x) can change rounding and
   exception behavior, so they are only done under
   -funsafe-math-optimizations.  */
12330 if (flag_unsafe_math_optimizations)
12332 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12333 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12335 /* Optimize sin(x)/cos(x) as tan(x). */
12336 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12337 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12338 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12339 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12340 CALL_EXPR_ARG (arg1, 0), 0))
12342 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12344 if (tanfn != NULL_TREE)
12345 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12348 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12349 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12350 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12351 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12352 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12353 CALL_EXPR_ARG (arg1, 0), 0))
12355 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12357 if (tanfn != NULL_TREE)
12359 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12360 CALL_EXPR_ARG (arg0, 0));
12361 return fold_build2_loc (loc, RDIV_EXPR, type,
12362 build_real (type, dconst1), tmp);
12366 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12367 NaNs or Infinities. */
12368 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12369 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12370 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12372 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12373 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12375 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12376 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12377 && operand_equal_p (arg00, arg01, 0))
12379 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12381 if (cosfn != NULL_TREE)
12382 return build_call_expr_loc (loc, cosfn, 1, arg00);
12386 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12387 NaNs or Infinities. */
12388 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12389 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12390 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12393 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12395 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12396 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12397 && operand_equal_p (arg00, arg01, 0))
12399 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12401 if (cosfn != NULL_TREE)
12403 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12404 return fold_build2_loc (loc, RDIV_EXPR, type,
12405 build_real (type, dconst1),
12406 tmp);
12411 /* Optimize pow(x,c)/x as pow(x,c-1). */
12412 if (fcode0 == BUILT_IN_POW
12413 || fcode0 == BUILT_IN_POWF
12414 || fcode0 == BUILT_IN_POWL)
12416 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12417 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12418 if (TREE_CODE (arg01) == REAL_CST
12419 && !TREE_OVERFLOW (arg01)
12420 && operand_equal_p (arg1, arg00, 0))
12422 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12423 REAL_VALUE_TYPE c;
12424 tree arg;
12426 c = TREE_REAL_CST (arg01);
12427 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12428 arg = build_real (type, c);
12429 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12433 /* Optimize a/root(b/c) into a*root(c/b). */
12434 if (BUILTIN_ROOT_P (fcode1))
12436 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12438 if (TREE_CODE (rootarg) == RDIV_EXPR)
12440 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12441 tree b = TREE_OPERAND (rootarg, 0);
12442 tree c = TREE_OPERAND (rootarg, 1);
12444 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12446 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12447 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12451 /* Optimize x/expN(y) into x*expN(-y). */
12452 if (BUILTIN_EXPONENT_P (fcode1))
12454 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12455 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12456 arg1 = build_call_expr_loc (loc,
12457 expfn, 1,
12458 fold_convert_loc (loc, type, arg));
12459 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12462 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12463 if (fcode1 == BUILT_IN_POW
12464 || fcode1 == BUILT_IN_POWF
12465 || fcode1 == BUILT_IN_POWL)
12467 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12468 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12469 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12470 tree neg11 = fold_convert_loc (loc, type,
12471 negate_expr (arg11));
12472 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12473 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12476 return NULL_TREE;
12478 case TRUNC_DIV_EXPR:
/* Integer division.  Note the deliberate fall-through chain:
   TRUNC_DIV_EXPR falls into FLOOR_DIV_EXPR below, which falls into
   ROUND/CEIL/EXACT_DIV_EXPR, so the later folds apply to every
   division code.  */
12479 /* Optimize (X & (-A)) / A where A is a power of 2,
12480 to X >> log2(A) */
12481 if (TREE_CODE (arg0) == BIT_AND_EXPR
12482 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12483 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
/* The mask must be exactly -A: check that A + mask folds to zero.  */
12485 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12486 arg1, TREE_OPERAND (arg0, 1));
12487 if (sum && integer_zerop (sum)) {
12488 unsigned long pow2;
/* A spans two HOST_WIDE_INT words (double_int); take log2 of
   whichever word holds the single set bit.  */
12490 if (TREE_INT_CST_LOW (arg1))
12491 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12492 else
12493 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12494 + HOST_BITS_PER_WIDE_INT;
12496 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12497 TREE_OPERAND (arg0, 0),
12498 build_int_cst (integer_type_node, pow2));
12502 /* Fall through */
12504 case FLOOR_DIV_EXPR:
12505 /* Simplify A / (B << N) where A and B are positive and B is
12506 a power of 2, to A >> (N + log2(B)). */
12507 strict_overflow_p = false;
12508 if (TREE_CODE (arg1) == LSHIFT_EXPR
12509 && (TYPE_UNSIGNED (type)
12510 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12512 tree sval = TREE_OPERAND (arg1, 0);
12513 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12515 tree sh_cnt = TREE_OPERAND (arg1, 1);
12516 unsigned long pow2;
/* As above: log2 of B from the low or high word of the constant.  */
12518 if (TREE_INT_CST_LOW (sval))
12519 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12520 else
12521 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12522 + HOST_BITS_PER_WIDE_INT;
12524 if (strict_overflow_p)
12525 fold_overflow_warning (("assuming signed overflow does not "
12526 "occur when simplifying A / (B << N)"),
12527 WARN_STRICT_OVERFLOW_MISC);
12529 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12530 sh_cnt,
12531 build_int_cst (TREE_TYPE (sh_cnt),
12532 pow2));
12533 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12534 fold_convert_loc (loc, type, arg0), sh_cnt);
12538 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12539 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12540 if (INTEGRAL_TYPE_P (type)
12541 && TYPE_UNSIGNED (type)
12542 && code == FLOOR_DIV_EXPR)
12543 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12545 /* Fall through */
12547 case ROUND_DIV_EXPR:
12548 case CEIL_DIV_EXPR:
12549 case EXACT_DIV_EXPR:
12550 if (integer_onep (arg1))
12551 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* X / 0: leave untouched so the front end can diagnose it.  */
12552 if (integer_zerop (arg1))
12553 return NULL_TREE;
12554 /* X / -1 is -X. */
12555 if (!TYPE_UNSIGNED (type)
12556 && TREE_CODE (arg1) == INTEGER_CST
12557 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12558 && TREE_INT_CST_HIGH (arg1) == -1)
12559 return fold_convert_loc (loc, type, negate_expr (arg0));
12561 /* Convert -A / -B to A / B when the type is signed and overflow is
12562 undefined. */
12563 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12564 && TREE_CODE (arg0) == NEGATE_EXPR
12565 && negate_expr_p (arg1))
12567 if (INTEGRAL_TYPE_P (type))
12568 fold_overflow_warning (("assuming signed overflow does not occur "
12569 "when distributing negation across "
12570 "division"),
12571 WARN_STRICT_OVERFLOW_MISC);
12572 return fold_build2_loc (loc, code, type,
12573 fold_convert_loc (loc, type,
12574 TREE_OPERAND (arg0, 0)),
12575 fold_convert_loc (loc, type,
12576 negate_expr (arg1)));
/* The mirror image: A / -B -> -A / B under the same conditions.  */
12578 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12579 && TREE_CODE (arg1) == NEGATE_EXPR
12580 && negate_expr_p (arg0))
12582 if (INTEGRAL_TYPE_P (type))
12583 fold_overflow_warning (("assuming signed overflow does not occur "
12584 "when distributing negation across "
12585 "division"),
12586 WARN_STRICT_OVERFLOW_MISC);
12587 return fold_build2_loc (loc, code, type,
12588 fold_convert_loc (loc, type,
12589 negate_expr (arg0)),
12590 fold_convert_loc (loc, type,
12591 TREE_OPERAND (arg1, 0)));
12594 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12595 operation, EXACT_DIV_EXPR.
12597 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12598 At one time others generated faster code, it's not clear if they do
12599 after the last round to changes to the DIV code in expmed.c. */
12600 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12601 && multiple_of_p (type, arg0, arg1))
12602 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
/* Last resort: let extract_muldiv combine the constant divisor with
   nested multiplications/divisions inside op0.  */
12604 strict_overflow_p = false;
12605 if (TREE_CODE (arg1) == INTEGER_CST
12606 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12607 &strict_overflow_p)))
12609 if (strict_overflow_p)
12610 fold_overflow_warning (("assuming signed overflow does not occur "
12611 "when simplifying division"),
12612 WARN_STRICT_OVERFLOW_MISC);
12613 return fold_convert_loc (loc, type, tem);
12616 return NULL_TREE;
12618 case CEIL_MOD_EXPR:
12619 case FLOOR_MOD_EXPR:
12620 case ROUND_MOD_EXPR:
12621 case TRUNC_MOD_EXPR:
/* Integer modulus: all four rounding variants share this code; folds
   specific to one variant test CODE explicitly below.  */
12622 /* X % 1 is always zero, but be sure to preserve any side
12623 effects in X. */
12624 if (integer_onep (arg1))
12625 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12627 /* X % 0, return X % 0 unchanged so that we can get the
12628 proper warnings and errors. */
12629 if (integer_zerop (arg1))
12630 return NULL_TREE;
12632 /* 0 % X is always zero, but be sure to preserve any side
12633 effects in X. Place this after checking for X == 0. */
12634 if (integer_zerop (arg0))
12635 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12637 /* X % -1 is zero. */
12638 if (!TYPE_UNSIGNED (type)
12639 && TREE_CODE (arg1) == INTEGER_CST
12640 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12641 && TREE_INT_CST_HIGH (arg1) == -1)
12642 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12644 /* X % -C is the same as X % C. */
12645 if (code == TRUNC_MOD_EXPR
12646 && !TYPE_UNSIGNED (type)
12647 && TREE_CODE (arg1) == INTEGER_CST
12648 && !TREE_OVERFLOW (arg1)
12649 && TREE_INT_CST_HIGH (arg1) < 0
12650 && !TYPE_OVERFLOW_TRAPS (type)
12651 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12652 && !sign_bit_p (arg1, arg1))
12653 return fold_build2_loc (loc, code, type,
12654 fold_convert_loc (loc, type, arg0),
12655 fold_convert_loc (loc, type,
12656 negate_expr (arg1)));
12658 /* X % -Y is the same as X % Y. */
12659 if (code == TRUNC_MOD_EXPR
12660 && !TYPE_UNSIGNED (type)
12661 && TREE_CODE (arg1) == NEGATE_EXPR
12662 && !TYPE_OVERFLOW_TRAPS (type))
12663 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12664 fold_convert_loc (loc, type,
12665 TREE_OPERAND (arg1, 0)));
/* Try extract_muldiv to combine the constant modulus with nested
   multiplications/divisions inside op0.  */
12667 strict_overflow_p = false;
12668 if (TREE_CODE (arg1) == INTEGER_CST
12669 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12670 &strict_overflow_p)))
12672 if (strict_overflow_p)
12673 fold_overflow_warning (("assuming signed overflow does not occur "
12674 "when simplifying modulus"),
12675 WARN_STRICT_OVERFLOW_MISC);
12676 return fold_convert_loc (loc, type, tem);
12679 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12680 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12681 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12682 && (TYPE_UNSIGNED (type)
12683 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12685 tree c = arg1;
12686 /* Also optimize A % (C << N) where C is a power of 2,
12687 to A & ((C << N) - 1). */
12688 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12689 c = TREE_OPERAND (arg1, 0);
12691 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12693 tree mask
12694 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12695 build_int_cst (TREE_TYPE (arg1), 1));
12696 if (strict_overflow_p)
12697 fold_overflow_warning (("assuming signed overflow does not "
12698 "occur when simplifying "
12699 "X % (power of two)"),
12700 WARN_STRICT_OVERFLOW_MISC);
12701 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12702 fold_convert_loc (loc, type, arg0),
12703 fold_convert_loc (loc, type, mask));
12707 return NULL_TREE;
12709 case LROTATE_EXPR:
12710 case RROTATE_EXPR:
12711 if (integer_all_onesp (arg0))
12712 return omit_one_operand_loc (loc, type, arg0, arg1);
12713 goto shift;
12715 case RSHIFT_EXPR:
12716 /* Optimize -1 >> x for arithmetic right shifts. */
12717 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12718 && tree_expr_nonnegative_p (arg1))
12719 return omit_one_operand_loc (loc, type, arg0, arg1);
12720 /* ... fall through ... */
12722 case LSHIFT_EXPR:
12723 shift:
12724 if (integer_zerop (arg1))
12725 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12726 if (integer_zerop (arg0))
12727 return omit_one_operand_loc (loc, type, arg0, arg1);
12729 /* Prefer vector1 << scalar to vector1 << vector2
12730 if vector2 is uniform. */
12731 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12732 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12733 return fold_build2_loc (loc, code, type, op0, tem);
12735 /* Since negative shift count is not well-defined,
12736 don't try to compute it in the compiler. */
12737 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12738 return NULL_TREE;
12740 prec = element_precision (type);
12742 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12743 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12744 && tree_to_uhwi (arg1) < prec
12745 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12746 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12748 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12749 + tree_to_uhwi (arg1));
12751 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12752 being well defined. */
12753 if (low >= prec)
12755 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12756 low = low % prec;
12757 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12758 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12759 TREE_OPERAND (arg0, 0));
12760 else
12761 low = prec - 1;
12764 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12765 build_int_cst (TREE_TYPE (arg1), low));
12768 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12769 into x & ((unsigned)-1 >> c) for unsigned types. */
12770 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12771 || (TYPE_UNSIGNED (type)
12772 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12773 && tree_fits_uhwi_p (arg1)
12774 && tree_to_uhwi (arg1) < prec
12775 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12776 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12778 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12779 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12780 tree lshift;
12781 tree arg00;
12783 if (low0 == low1)
12785 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12787 lshift = build_minus_one_cst (type);
12788 lshift = const_binop (code, lshift, arg1);
12790 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12794 /* Rewrite an LROTATE_EXPR by a constant into an
12795 RROTATE_EXPR by a new constant. */
12796 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12798 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12799 tem = const_binop (MINUS_EXPR, tem, arg1);
12800 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12803 /* If we have a rotate of a bit operation with the rotate count and
12804 the second operand of the bit operation both constant,
12805 permute the two operations. */
12806 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12807 && (TREE_CODE (arg0) == BIT_AND_EXPR
12808 || TREE_CODE (arg0) == BIT_IOR_EXPR
12809 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12811 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12812 fold_build2_loc (loc, code, type,
12813 TREE_OPERAND (arg0, 0), arg1),
12814 fold_build2_loc (loc, code, type,
12815 TREE_OPERAND (arg0, 1), arg1));
12817 /* Two consecutive rotates adding up to the precision of the
12818 type can be ignored. */
12819 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12820 && TREE_CODE (arg0) == RROTATE_EXPR
12821 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12822 && TREE_INT_CST_HIGH (arg1) == 0
12823 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12824 && ((TREE_INT_CST_LOW (arg1)
12825 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12826 == prec))
12827 return TREE_OPERAND (arg0, 0);
12829 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12830 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12831 if the latter can be further optimized. */
12832 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12833 && TREE_CODE (arg0) == BIT_AND_EXPR
12834 && TREE_CODE (arg1) == INTEGER_CST
12835 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12837 tree mask = fold_build2_loc (loc, code, type,
12838 fold_convert_loc (loc, type,
12839 TREE_OPERAND (arg0, 1)),
12840 arg1);
12841 tree shift = fold_build2_loc (loc, code, type,
12842 fold_convert_loc (loc, type,
12843 TREE_OPERAND (arg0, 0)),
12844 arg1);
12845 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12846 if (tem)
12847 return tem;
12850 return NULL_TREE;
12852 case MIN_EXPR:
12853 if (operand_equal_p (arg0, arg1, 0))
12854 return omit_one_operand_loc (loc, type, arg0, arg1);
12855 if (INTEGRAL_TYPE_P (type)
12856 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12857 return omit_one_operand_loc (loc, type, arg1, arg0);
12858 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12859 if (tem)
12860 return tem;
12861 goto associate;
12863 case MAX_EXPR:
12864 if (operand_equal_p (arg0, arg1, 0))
12865 return omit_one_operand_loc (loc, type, arg0, arg1);
12866 if (INTEGRAL_TYPE_P (type)
12867 && TYPE_MAX_VALUE (type)
12868 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12869 return omit_one_operand_loc (loc, type, arg1, arg0);
12870 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12871 if (tem)
12872 return tem;
12873 goto associate;
12875 case TRUTH_ANDIF_EXPR:
12876 /* Note that the operands of this must be ints
12877 and their values must be 0 or 1.
12878 ("true" is a fixed value perhaps depending on the language.) */
12879 /* If first arg is constant zero, return it. */
12880 if (integer_zerop (arg0))
12881 return fold_convert_loc (loc, type, arg0);
12882 case TRUTH_AND_EXPR:
12883 /* If either arg is constant true, drop it. */
12884 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12885 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12886 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12887 /* Preserve sequence points. */
12888 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12889 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12890 /* If second arg is constant zero, result is zero, but first arg
12891 must be evaluated. */
12892 if (integer_zerop (arg1))
12893 return omit_one_operand_loc (loc, type, arg1, arg0);
12894 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12895 case will be handled here. */
12896 if (integer_zerop (arg0))
12897 return omit_one_operand_loc (loc, type, arg0, arg1);
12899 /* !X && X is always false. */
12900 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12901 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12902 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12903 /* X && !X is always false. */
12904 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12906 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12908 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12909 means A >= Y && A != MAX, but in this case we know that
12910 A < X <= MAX. */
12912 if (!TREE_SIDE_EFFECTS (arg0)
12913 && !TREE_SIDE_EFFECTS (arg1))
12915 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12916 if (tem && !operand_equal_p (tem, arg0, 0))
12917 return fold_build2_loc (loc, code, type, tem, arg1);
12919 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12920 if (tem && !operand_equal_p (tem, arg1, 0))
12921 return fold_build2_loc (loc, code, type, arg0, tem);
12924 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12925 != NULL_TREE)
12926 return tem;
12928 return NULL_TREE;
12930 case TRUTH_ORIF_EXPR:
12931 /* Note that the operands of this must be ints
12932 and their values must be 0 or true.
12933 ("true" is a fixed value perhaps depending on the language.) */
12934 /* If first arg is constant true, return it. */
12935 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12936 return fold_convert_loc (loc, type, arg0);
12937 case TRUTH_OR_EXPR:
12938 /* If either arg is constant zero, drop it. */
12939 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12940 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12941 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12942 /* Preserve sequence points. */
12943 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12944 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12945 /* If second arg is constant true, result is true, but we must
12946 evaluate first arg. */
12947 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12948 return omit_one_operand_loc (loc, type, arg1, arg0);
12949 /* Likewise for first arg, but note this only occurs here for
12950 TRUTH_OR_EXPR. */
12951 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12952 return omit_one_operand_loc (loc, type, arg0, arg1);
12954 /* !X || X is always true. */
12955 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12956 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12957 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12958 /* X || !X is always true. */
12959 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12960 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12961 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12963 /* (X && !Y) || (!X && Y) is X ^ Y */
12964 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12965 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12967 tree a0, a1, l0, l1, n0, n1;
12969 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12970 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12972 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12973 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12975 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12976 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12978 if ((operand_equal_p (n0, a0, 0)
12979 && operand_equal_p (n1, a1, 0))
12980 || (operand_equal_p (n0, a1, 0)
12981 && operand_equal_p (n1, a0, 0)))
12982 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12985 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12986 != NULL_TREE)
12987 return tem;
12989 return NULL_TREE;
12991 case TRUTH_XOR_EXPR:
12992 /* If the second arg is constant zero, drop it. */
12993 if (integer_zerop (arg1))
12994 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12995 /* If the second arg is constant true, this is a logical inversion. */
12996 if (integer_onep (arg1))
12998 tem = invert_truthvalue_loc (loc, arg0);
12999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
13001 /* Identical arguments cancel to zero. */
13002 if (operand_equal_p (arg0, arg1, 0))
13003 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13005 /* !X ^ X is always true. */
13006 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
13007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
13008 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
13010 /* X ^ !X is always true. */
13011 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
13012 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
13013 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13015 return NULL_TREE;
13017 case EQ_EXPR:
13018 case NE_EXPR:
13019 STRIP_NOPS (arg0);
13020 STRIP_NOPS (arg1);
13022 tem = fold_comparison (loc, code, type, op0, op1);
13023 if (tem != NULL_TREE)
13024 return tem;
13026 /* bool_var != 0 becomes bool_var. */
13027 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13028 && code == NE_EXPR)
13029 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13031 /* bool_var == 1 becomes bool_var. */
13032 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13033 && code == EQ_EXPR)
13034 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13036 /* bool_var != 1 becomes !bool_var. */
13037 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13038 && code == NE_EXPR)
13039 return fold_convert_loc (loc, type,
13040 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13041 TREE_TYPE (arg0), arg0));
13043 /* bool_var == 0 becomes !bool_var. */
13044 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13045 && code == EQ_EXPR)
13046 return fold_convert_loc (loc, type,
13047 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13048 TREE_TYPE (arg0), arg0));
13050 /* !exp != 0 becomes !exp */
13051 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
13052 && code == NE_EXPR)
13053 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13055 /* If this is an equality comparison of the address of two non-weak,
13056 unaliased symbols neither of which are extern (since we do not
13057 have access to attributes for externs), then we know the result. */
13058 if (TREE_CODE (arg0) == ADDR_EXPR
13059 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
13060 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
13061 && ! lookup_attribute ("alias",
13062 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
13063 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
13064 && TREE_CODE (arg1) == ADDR_EXPR
13065 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13066 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13067 && ! lookup_attribute ("alias",
13068 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13069 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13071 /* We know that we're looking at the address of two
13072 non-weak, unaliased, static _DECL nodes.
13074 It is both wasteful and incorrect to call operand_equal_p
13075 to compare the two ADDR_EXPR nodes. It is wasteful in that
13076 all we need to do is test pointer equality for the arguments
13077 to the two ADDR_EXPR nodes. It is incorrect to use
13078 operand_equal_p as that function is NOT equivalent to a
13079 C equality test. It can in fact return false for two
13080 objects which would test as equal using the C equality
13081 operator. */
13082 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13083 return constant_boolean_node (equal
13084 ? code == EQ_EXPR : code != EQ_EXPR,
13085 type);
13088 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13089 a MINUS_EXPR of a constant, we can convert it into a comparison with
13090 a revised constant as long as no overflow occurs. */
13091 if (TREE_CODE (arg1) == INTEGER_CST
13092 && (TREE_CODE (arg0) == PLUS_EXPR
13093 || TREE_CODE (arg0) == MINUS_EXPR)
13094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13095 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13096 ? MINUS_EXPR : PLUS_EXPR,
13097 fold_convert_loc (loc, TREE_TYPE (arg0),
13098 arg1),
13099 TREE_OPERAND (arg0, 1)))
13100 && !TREE_OVERFLOW (tem))
13101 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13103 /* Similarly for a NEGATE_EXPR. */
13104 if (TREE_CODE (arg0) == NEGATE_EXPR
13105 && TREE_CODE (arg1) == INTEGER_CST
13106 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13107 arg1)))
13108 && TREE_CODE (tem) == INTEGER_CST
13109 && !TREE_OVERFLOW (tem))
13110 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13112 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13113 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13114 && TREE_CODE (arg1) == INTEGER_CST
13115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13116 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13117 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13118 fold_convert_loc (loc,
13119 TREE_TYPE (arg0),
13120 arg1),
13121 TREE_OPERAND (arg0, 1)));
13123 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13124 if ((TREE_CODE (arg0) == PLUS_EXPR
13125 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13126 || TREE_CODE (arg0) == MINUS_EXPR)
13127 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13128 0)),
13129 arg1, 0)
13130 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13131 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13133 tree val = TREE_OPERAND (arg0, 1);
13134 return omit_two_operands_loc (loc, type,
13135 fold_build2_loc (loc, code, type,
13136 val,
13137 build_int_cst (TREE_TYPE (val),
13138 0)),
13139 TREE_OPERAND (arg0, 0), arg1);
13142 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13143 if (TREE_CODE (arg0) == MINUS_EXPR
13144 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13145 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13146 1)),
13147 arg1, 0)
13148 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13150 return omit_two_operands_loc (loc, type,
13151 code == NE_EXPR
13152 ? boolean_true_node : boolean_false_node,
13153 TREE_OPERAND (arg0, 1), arg1);
13156 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13157 for !=. Don't do this for ordered comparisons due to overflow. */
13158 if (TREE_CODE (arg0) == MINUS_EXPR
13159 && integer_zerop (arg1))
13160 return fold_build2_loc (loc, code, type,
13161 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13163 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13164 if (TREE_CODE (arg0) == ABS_EXPR
13165 && (integer_zerop (arg1) || real_zerop (arg1)))
13166 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13168 /* If this is an EQ or NE comparison with zero and ARG0 is
13169 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13170 two operations, but the latter can be done in one less insn
13171 on machines that have only two-operand insns or on which a
13172 constant cannot be the first operand. */
13173 if (TREE_CODE (arg0) == BIT_AND_EXPR
13174 && integer_zerop (arg1))
13176 tree arg00 = TREE_OPERAND (arg0, 0);
13177 tree arg01 = TREE_OPERAND (arg0, 1);
13178 if (TREE_CODE (arg00) == LSHIFT_EXPR
13179 && integer_onep (TREE_OPERAND (arg00, 0)))
13181 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13182 arg01, TREE_OPERAND (arg00, 1));
13183 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13184 build_int_cst (TREE_TYPE (arg0), 1));
13185 return fold_build2_loc (loc, code, type,
13186 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13187 arg1);
13189 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13190 && integer_onep (TREE_OPERAND (arg01, 0)))
13192 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13193 arg00, TREE_OPERAND (arg01, 1));
13194 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13195 build_int_cst (TREE_TYPE (arg0), 1));
13196 return fold_build2_loc (loc, code, type,
13197 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13198 arg1);
13202 /* If this is an NE or EQ comparison of zero against the result of a
13203 signed MOD operation whose second operand is a power of 2, make
13204 the MOD operation unsigned since it is simpler and equivalent. */
13205 if (integer_zerop (arg1)
13206 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13207 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13208 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13209 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13210 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13211 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13213 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13214 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13215 fold_convert_loc (loc, newtype,
13216 TREE_OPERAND (arg0, 0)),
13217 fold_convert_loc (loc, newtype,
13218 TREE_OPERAND (arg0, 1)));
13220 return fold_build2_loc (loc, code, type, newmod,
13221 fold_convert_loc (loc, newtype, arg1));
13224 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13225 C1 is a valid shift constant, and C2 is a power of two, i.e.
13226 a single bit. */
13227 if (TREE_CODE (arg0) == BIT_AND_EXPR
13228 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13229 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13230 == INTEGER_CST
13231 && integer_pow2p (TREE_OPERAND (arg0, 1))
13232 && integer_zerop (arg1))
13234 tree itype = TREE_TYPE (arg0);
13235 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13236 prec = TYPE_PRECISION (itype);
13238 /* Check for a valid shift count. */
13239 if (TREE_INT_CST_HIGH (arg001) == 0
13240 && TREE_INT_CST_LOW (arg001) < prec)
13242 tree arg01 = TREE_OPERAND (arg0, 1);
13243 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13244 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13245 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13246 can be rewritten as (X & (C2 << C1)) != 0. */
13247 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13249 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13250 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13251 return fold_build2_loc (loc, code, type, tem,
13252 fold_convert_loc (loc, itype, arg1));
13254 /* Otherwise, for signed (arithmetic) shifts,
13255 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13256 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13257 else if (!TYPE_UNSIGNED (itype))
13258 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13259 arg000, build_int_cst (itype, 0));
 13260		  /* Otherwise, for unsigned (logical) shifts,
13261 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13262 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13263 else
13264 return omit_one_operand_loc (loc, type,
13265 code == EQ_EXPR ? integer_one_node
13266 : integer_zero_node,
13267 arg000);
13271 /* If we have (A & C) == C where C is a power of 2, convert this into
13272 (A & C) != 0. Similarly for NE_EXPR. */
13273 if (TREE_CODE (arg0) == BIT_AND_EXPR
13274 && integer_pow2p (TREE_OPERAND (arg0, 1))
13275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13276 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13277 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13278 integer_zero_node));
13280 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13281 bit, then fold the expression into A < 0 or A >= 0. */
13282 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13283 if (tem)
13284 return tem;
13286 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13287 Similarly for NE_EXPR. */
13288 if (TREE_CODE (arg0) == BIT_AND_EXPR
13289 && TREE_CODE (arg1) == INTEGER_CST
13290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13292 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13293 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13294 TREE_OPERAND (arg0, 1));
13295 tree dandnotc
13296 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13297 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13298 notc);
13299 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13300 if (integer_nonzerop (dandnotc))
13301 return omit_one_operand_loc (loc, type, rslt, arg0);
13304 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13305 Similarly for NE_EXPR. */
13306 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13307 && TREE_CODE (arg1) == INTEGER_CST
13308 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13310 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13311 tree candnotd
13312 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13313 TREE_OPERAND (arg0, 1),
13314 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13315 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13316 if (integer_nonzerop (candnotd))
13317 return omit_one_operand_loc (loc, type, rslt, arg0);
13320 /* If this is a comparison of a field, we may be able to simplify it. */
13321 if ((TREE_CODE (arg0) == COMPONENT_REF
13322 || TREE_CODE (arg0) == BIT_FIELD_REF)
13323 /* Handle the constant case even without -O
13324 to make sure the warnings are given. */
13325 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13327 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13328 if (t1)
13329 return t1;
13332 /* Optimize comparisons of strlen vs zero to a compare of the
13333 first character of the string vs zero. To wit,
13334 strlen(ptr) == 0 => *ptr == 0
13335 strlen(ptr) != 0 => *ptr != 0
13336 Other cases should reduce to one of these two (or a constant)
13337 due to the return value of strlen being unsigned. */
13338 if (TREE_CODE (arg0) == CALL_EXPR
13339 && integer_zerop (arg1))
13341 tree fndecl = get_callee_fndecl (arg0);
13343 if (fndecl
13344 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13345 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13346 && call_expr_nargs (arg0) == 1
13347 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13349 tree iref = build_fold_indirect_ref_loc (loc,
13350 CALL_EXPR_ARG (arg0, 0));
13351 return fold_build2_loc (loc, code, type, iref,
13352 build_int_cst (TREE_TYPE (iref), 0));
13356 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13357 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13358 if (TREE_CODE (arg0) == RSHIFT_EXPR
13359 && integer_zerop (arg1)
13360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13362 tree arg00 = TREE_OPERAND (arg0, 0);
13363 tree arg01 = TREE_OPERAND (arg0, 1);
13364 tree itype = TREE_TYPE (arg00);
13365 if (TREE_INT_CST_HIGH (arg01) == 0
13366 && TREE_INT_CST_LOW (arg01)
13367 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13369 if (TYPE_UNSIGNED (itype))
13371 itype = signed_type_for (itype);
13372 arg00 = fold_convert_loc (loc, itype, arg00);
13374 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13375 type, arg00, build_zero_cst (itype));
13379 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13380 if (integer_zerop (arg1)
13381 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13382 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13383 TREE_OPERAND (arg0, 1));
13385 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13386 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13387 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13389 build_zero_cst (TREE_TYPE (arg0)));
13390 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13391 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13392 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13393 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13394 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13395 build_zero_cst (TREE_TYPE (arg0)));
13397 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13398 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13399 && TREE_CODE (arg1) == INTEGER_CST
13400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13401 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13402 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13403 TREE_OPERAND (arg0, 1), arg1));
13405 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13406 (X & C) == 0 when C is a single bit. */
13407 if (TREE_CODE (arg0) == BIT_AND_EXPR
13408 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13409 && integer_zerop (arg1)
13410 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13412 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13413 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13414 TREE_OPERAND (arg0, 1));
13415 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13416 type, tem,
13417 fold_convert_loc (loc, TREE_TYPE (arg0),
13418 arg1));
13421 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13422 constant C is a power of two, i.e. a single bit. */
13423 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13424 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13425 && integer_zerop (arg1)
13426 && integer_pow2p (TREE_OPERAND (arg0, 1))
13427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13428 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13430 tree arg00 = TREE_OPERAND (arg0, 0);
13431 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13432 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13435 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 13436	 when C is a power of two, i.e. a single bit.  */
13437 if (TREE_CODE (arg0) == BIT_AND_EXPR
13438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13439 && integer_zerop (arg1)
13440 && integer_pow2p (TREE_OPERAND (arg0, 1))
13441 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13442 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13444 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13445 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13446 arg000, TREE_OPERAND (arg0, 1));
13447 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13448 tem, build_int_cst (TREE_TYPE (tem), 0));
13451 if (integer_zerop (arg1)
13452 && tree_expr_nonzero_p (arg0))
13454 tree res = constant_boolean_node (code==NE_EXPR, type);
13455 return omit_one_operand_loc (loc, type, res, arg0);
13458 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13459 if (TREE_CODE (arg0) == NEGATE_EXPR
13460 && TREE_CODE (arg1) == NEGATE_EXPR)
13461 return fold_build2_loc (loc, code, type,
13462 TREE_OPERAND (arg0, 0),
13463 fold_convert_loc (loc, TREE_TYPE (arg0),
13464 TREE_OPERAND (arg1, 0)));
13466 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13467 if (TREE_CODE (arg0) == BIT_AND_EXPR
13468 && TREE_CODE (arg1) == BIT_AND_EXPR)
13470 tree arg00 = TREE_OPERAND (arg0, 0);
13471 tree arg01 = TREE_OPERAND (arg0, 1);
13472 tree arg10 = TREE_OPERAND (arg1, 0);
13473 tree arg11 = TREE_OPERAND (arg1, 1);
13474 tree itype = TREE_TYPE (arg0);
13476 if (operand_equal_p (arg01, arg11, 0))
13477 return fold_build2_loc (loc, code, type,
13478 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13479 fold_build2_loc (loc,
13480 BIT_XOR_EXPR, itype,
13481 arg00, arg10),
13482 arg01),
13483 build_zero_cst (itype));
13485 if (operand_equal_p (arg01, arg10, 0))
13486 return fold_build2_loc (loc, code, type,
13487 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13488 fold_build2_loc (loc,
13489 BIT_XOR_EXPR, itype,
13490 arg00, arg11),
13491 arg01),
13492 build_zero_cst (itype));
13494 if (operand_equal_p (arg00, arg11, 0))
13495 return fold_build2_loc (loc, code, type,
13496 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13497 fold_build2_loc (loc,
13498 BIT_XOR_EXPR, itype,
13499 arg01, arg10),
13500 arg00),
13501 build_zero_cst (itype));
13503 if (operand_equal_p (arg00, arg10, 0))
13504 return fold_build2_loc (loc, code, type,
13505 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13506 fold_build2_loc (loc,
13507 BIT_XOR_EXPR, itype,
13508 arg01, arg11),
13509 arg00),
13510 build_zero_cst (itype));
13513 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13514 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13516 tree arg00 = TREE_OPERAND (arg0, 0);
13517 tree arg01 = TREE_OPERAND (arg0, 1);
13518 tree arg10 = TREE_OPERAND (arg1, 0);
13519 tree arg11 = TREE_OPERAND (arg1, 1);
13520 tree itype = TREE_TYPE (arg0);
13522 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13523 operand_equal_p guarantees no side-effects so we don't need
13524 to use omit_one_operand on Z. */
13525 if (operand_equal_p (arg01, arg11, 0))
13526 return fold_build2_loc (loc, code, type, arg00,
13527 fold_convert_loc (loc, TREE_TYPE (arg00),
13528 arg10));
13529 if (operand_equal_p (arg01, arg10, 0))
13530 return fold_build2_loc (loc, code, type, arg00,
13531 fold_convert_loc (loc, TREE_TYPE (arg00),
13532 arg11));
13533 if (operand_equal_p (arg00, arg11, 0))
13534 return fold_build2_loc (loc, code, type, arg01,
13535 fold_convert_loc (loc, TREE_TYPE (arg01),
13536 arg10));
13537 if (operand_equal_p (arg00, arg10, 0))
13538 return fold_build2_loc (loc, code, type, arg01,
13539 fold_convert_loc (loc, TREE_TYPE (arg01),
13540 arg11));
13542 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13543 if (TREE_CODE (arg01) == INTEGER_CST
13544 && TREE_CODE (arg11) == INTEGER_CST)
13546 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13547 fold_convert_loc (loc, itype, arg11));
13548 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13549 return fold_build2_loc (loc, code, type, tem,
13550 fold_convert_loc (loc, itype, arg10));
13554 /* Attempt to simplify equality/inequality comparisons of complex
13555 values. Only lower the comparison if the result is known or
13556 can be simplified to a single scalar comparison. */
13557 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13558 || TREE_CODE (arg0) == COMPLEX_CST)
13559 && (TREE_CODE (arg1) == COMPLEX_EXPR
13560 || TREE_CODE (arg1) == COMPLEX_CST))
13562 tree real0, imag0, real1, imag1;
13563 tree rcond, icond;
13565 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13567 real0 = TREE_OPERAND (arg0, 0);
13568 imag0 = TREE_OPERAND (arg0, 1);
13570 else
13572 real0 = TREE_REALPART (arg0);
13573 imag0 = TREE_IMAGPART (arg0);
13576 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13578 real1 = TREE_OPERAND (arg1, 0);
13579 imag1 = TREE_OPERAND (arg1, 1);
13581 else
13583 real1 = TREE_REALPART (arg1);
13584 imag1 = TREE_IMAGPART (arg1);
13587 rcond = fold_binary_loc (loc, code, type, real0, real1);
13588 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13590 if (integer_zerop (rcond))
13592 if (code == EQ_EXPR)
13593 return omit_two_operands_loc (loc, type, boolean_false_node,
13594 imag0, imag1);
13595 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13597 else
13599 if (code == NE_EXPR)
13600 return omit_two_operands_loc (loc, type, boolean_true_node,
13601 imag0, imag1);
13602 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13606 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13607 if (icond && TREE_CODE (icond) == INTEGER_CST)
13609 if (integer_zerop (icond))
13611 if (code == EQ_EXPR)
13612 return omit_two_operands_loc (loc, type, boolean_false_node,
13613 real0, real1);
13614 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13616 else
13618 if (code == NE_EXPR)
13619 return omit_two_operands_loc (loc, type, boolean_true_node,
13620 real0, real1);
13621 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13626 return NULL_TREE;
13628 case LT_EXPR:
13629 case GT_EXPR:
13630 case LE_EXPR:
13631 case GE_EXPR:
13632 tem = fold_comparison (loc, code, type, op0, op1);
13633 if (tem != NULL_TREE)
13634 return tem;
13636 /* Transform comparisons of the form X +- C CMP X. */
13637 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13638 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13639 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13640 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13641 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13642 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13644 tree arg01 = TREE_OPERAND (arg0, 1);
13645 enum tree_code code0 = TREE_CODE (arg0);
13646 int is_positive;
13648 if (TREE_CODE (arg01) == REAL_CST)
13649 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13650 else
13651 is_positive = tree_int_cst_sgn (arg01);
13653 /* (X - c) > X becomes false. */
13654 if (code == GT_EXPR
13655 && ((code0 == MINUS_EXPR && is_positive >= 0)
13656 || (code0 == PLUS_EXPR && is_positive <= 0)))
13658 if (TREE_CODE (arg01) == INTEGER_CST
13659 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13660 fold_overflow_warning (("assuming signed overflow does not "
13661 "occur when assuming that (X - c) > X "
13662 "is always false"),
13663 WARN_STRICT_OVERFLOW_ALL);
13664 return constant_boolean_node (0, type);
13667 /* Likewise (X + c) < X becomes false. */
13668 if (code == LT_EXPR
13669 && ((code0 == PLUS_EXPR && is_positive >= 0)
13670 || (code0 == MINUS_EXPR && is_positive <= 0)))
13672 if (TREE_CODE (arg01) == INTEGER_CST
13673 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13674 fold_overflow_warning (("assuming signed overflow does not "
13675 "occur when assuming that "
13676 "(X + c) < X is always false"),
13677 WARN_STRICT_OVERFLOW_ALL);
13678 return constant_boolean_node (0, type);
13681 /* Convert (X - c) <= X to true. */
13682 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13683 && code == LE_EXPR
13684 && ((code0 == MINUS_EXPR && is_positive >= 0)
13685 || (code0 == PLUS_EXPR && is_positive <= 0)))
13687 if (TREE_CODE (arg01) == INTEGER_CST
13688 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13689 fold_overflow_warning (("assuming signed overflow does not "
13690 "occur when assuming that "
13691 "(X - c) <= X is always true"),
13692 WARN_STRICT_OVERFLOW_ALL);
13693 return constant_boolean_node (1, type);
13696 /* Convert (X + c) >= X to true. */
13697 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13698 && code == GE_EXPR
13699 && ((code0 == PLUS_EXPR && is_positive >= 0)
13700 || (code0 == MINUS_EXPR && is_positive <= 0)))
13702 if (TREE_CODE (arg01) == INTEGER_CST
13703 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13704 fold_overflow_warning (("assuming signed overflow does not "
13705 "occur when assuming that "
13706 "(X + c) >= X is always true"),
13707 WARN_STRICT_OVERFLOW_ALL);
13708 return constant_boolean_node (1, type);
13711 if (TREE_CODE (arg01) == INTEGER_CST)
13713 /* Convert X + c > X and X - c < X to true for integers. */
13714 if (code == GT_EXPR
13715 && ((code0 == PLUS_EXPR && is_positive > 0)
13716 || (code0 == MINUS_EXPR && is_positive < 0)))
13718 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13719 fold_overflow_warning (("assuming signed overflow does "
13720 "not occur when assuming that "
13721 "(X + c) > X is always true"),
13722 WARN_STRICT_OVERFLOW_ALL);
13723 return constant_boolean_node (1, type);
13726 if (code == LT_EXPR
13727 && ((code0 == MINUS_EXPR && is_positive > 0)
13728 || (code0 == PLUS_EXPR && is_positive < 0)))
13730 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13731 fold_overflow_warning (("assuming signed overflow does "
13732 "not occur when assuming that "
13733 "(X - c) < X is always true"),
13734 WARN_STRICT_OVERFLOW_ALL);
13735 return constant_boolean_node (1, type);
13738 /* Convert X + c <= X and X - c >= X to false for integers. */
13739 if (code == LE_EXPR
13740 && ((code0 == PLUS_EXPR && is_positive > 0)
13741 || (code0 == MINUS_EXPR && is_positive < 0)))
13743 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13744 fold_overflow_warning (("assuming signed overflow does "
13745 "not occur when assuming that "
13746 "(X + c) <= X is always false"),
13747 WARN_STRICT_OVERFLOW_ALL);
13748 return constant_boolean_node (0, type);
13751 if (code == GE_EXPR
13752 && ((code0 == MINUS_EXPR && is_positive > 0)
13753 || (code0 == PLUS_EXPR && is_positive < 0)))
13755 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13756 fold_overflow_warning (("assuming signed overflow does "
13757 "not occur when assuming that "
13758 "(X - c) >= X is always false"),
13759 WARN_STRICT_OVERFLOW_ALL);
13760 return constant_boolean_node (0, type);
13765 /* Comparisons with the highest or lowest possible integer of
13766 the specified precision will have known values. */
13768 tree arg1_type = TREE_TYPE (arg1);
13769 unsigned int width = TYPE_PRECISION (arg1_type);
13771 if (TREE_CODE (arg1) == INTEGER_CST
13772 && width <= HOST_BITS_PER_DOUBLE_INT
13773 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13775 HOST_WIDE_INT signed_max_hi;
13776 unsigned HOST_WIDE_INT signed_max_lo;
13777 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13779 if (width <= HOST_BITS_PER_WIDE_INT)
13781 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13782 - 1;
13783 signed_max_hi = 0;
13784 max_hi = 0;
13786 if (TYPE_UNSIGNED (arg1_type))
13788 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13789 min_lo = 0;
13790 min_hi = 0;
13792 else
13794 max_lo = signed_max_lo;
13795 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13796 min_hi = -1;
13799 else
13801 width -= HOST_BITS_PER_WIDE_INT;
13802 signed_max_lo = -1;
13803 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13804 - 1;
13805 max_lo = -1;
13806 min_lo = 0;
13808 if (TYPE_UNSIGNED (arg1_type))
13810 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13811 min_hi = 0;
13813 else
13815 max_hi = signed_max_hi;
13816 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13820 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13821 && TREE_INT_CST_LOW (arg1) == max_lo)
13822 switch (code)
13824 case GT_EXPR:
13825 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13827 case GE_EXPR:
13828 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13830 case LE_EXPR:
13831 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13833 case LT_EXPR:
13834 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13836 /* The GE_EXPR and LT_EXPR cases above are not normally
13837 reached because of previous transformations. */
13839 default:
13840 break;
13842 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13843 == max_hi
13844 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13845 switch (code)
13847 case GT_EXPR:
13848 arg1 = const_binop (PLUS_EXPR, arg1,
13849 build_int_cst (TREE_TYPE (arg1), 1));
13850 return fold_build2_loc (loc, EQ_EXPR, type,
13851 fold_convert_loc (loc,
13852 TREE_TYPE (arg1), arg0),
13853 arg1);
13854 case LE_EXPR:
13855 arg1 = const_binop (PLUS_EXPR, arg1,
13856 build_int_cst (TREE_TYPE (arg1), 1));
13857 return fold_build2_loc (loc, NE_EXPR, type,
13858 fold_convert_loc (loc, TREE_TYPE (arg1),
13859 arg0),
13860 arg1);
13861 default:
13862 break;
13864 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13865 == min_hi
13866 && TREE_INT_CST_LOW (arg1) == min_lo)
13867 switch (code)
13869 case LT_EXPR:
13870 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13872 case LE_EXPR:
13873 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13875 case GE_EXPR:
13876 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13878 case GT_EXPR:
13879 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13881 default:
13882 break;
13884 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13885 == min_hi
13886 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13887 switch (code)
13889 case GE_EXPR:
13890 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13891 return fold_build2_loc (loc, NE_EXPR, type,
13892 fold_convert_loc (loc,
13893 TREE_TYPE (arg1), arg0),
13894 arg1);
13895 case LT_EXPR:
13896 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13897 return fold_build2_loc (loc, EQ_EXPR, type,
13898 fold_convert_loc (loc, TREE_TYPE (arg1),
13899 arg0),
13900 arg1);
13901 default:
13902 break;
13905 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13906 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13907 && TYPE_UNSIGNED (arg1_type)
13908 /* We will flip the signedness of the comparison operator
13909 associated with the mode of arg1, so the sign bit is
13910 specified by this mode. Check that arg1 is the signed
13911 max associated with this sign bit. */
13912 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13913 /* signed_type does not work on pointer types. */
13914 && INTEGRAL_TYPE_P (arg1_type))
13916 /* The following case also applies to X < signed_max+1
13917 and X >= signed_max+1 because previous transformations. */
13918 if (code == LE_EXPR || code == GT_EXPR)
13920 tree st = signed_type_for (arg1_type);
13921 return fold_build2_loc (loc,
13922 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13923 type, fold_convert_loc (loc, st, arg0),
13924 build_int_cst (st, 0));
13930 /* If we are comparing an ABS_EXPR with a constant, we can
13931 convert all the cases into explicit comparisons, but they may
13932 well not be faster than doing the ABS and one comparison.
13933 But ABS (X) <= C is a range comparison, which becomes a subtraction
13934 and a comparison, and is probably faster. */
13935 if (code == LE_EXPR
13936 && TREE_CODE (arg1) == INTEGER_CST
13937 && TREE_CODE (arg0) == ABS_EXPR
13938 && ! TREE_SIDE_EFFECTS (arg0)
13939 && (0 != (tem = negate_expr (arg1)))
13940 && TREE_CODE (tem) == INTEGER_CST
13941 && !TREE_OVERFLOW (tem))
13942 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13943 build2 (GE_EXPR, type,
13944 TREE_OPERAND (arg0, 0), tem),
13945 build2 (LE_EXPR, type,
13946 TREE_OPERAND (arg0, 0), arg1));
13948 /* Convert ABS_EXPR<x> >= 0 to true. */
13949 strict_overflow_p = false;
13950 if (code == GE_EXPR
13951 && (integer_zerop (arg1)
13952 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13953 && real_zerop (arg1)))
13954 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13956 if (strict_overflow_p)
13957 fold_overflow_warning (("assuming signed overflow does not occur "
13958 "when simplifying comparison of "
13959 "absolute value and zero"),
13960 WARN_STRICT_OVERFLOW_CONDITIONAL);
13961 return omit_one_operand_loc (loc, type,
13962 constant_boolean_node (true, type),
13963 arg0);
13966 /* Convert ABS_EXPR<x> < 0 to false. */
13967 strict_overflow_p = false;
13968 if (code == LT_EXPR
13969 && (integer_zerop (arg1) || real_zerop (arg1))
13970 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13972 if (strict_overflow_p)
13973 fold_overflow_warning (("assuming signed overflow does not occur "
13974 "when simplifying comparison of "
13975 "absolute value and zero"),
13976 WARN_STRICT_OVERFLOW_CONDITIONAL);
13977 return omit_one_operand_loc (loc, type,
13978 constant_boolean_node (false, type),
13979 arg0);
13982 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13983 and similarly for >= into !=. */
13984 if ((code == LT_EXPR || code == GE_EXPR)
13985 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13986 && TREE_CODE (arg1) == LSHIFT_EXPR
13987 && integer_onep (TREE_OPERAND (arg1, 0)))
13988 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13989 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13990 TREE_OPERAND (arg1, 1)),
13991 build_zero_cst (TREE_TYPE (arg0)));
13993 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13994 otherwise Y might be >= # of bits in X's type and thus e.g.
13995 (unsigned char) (1 << Y) for Y 15 might be 0.
13996 If the cast is widening, then 1 << Y should have unsigned type,
13997 otherwise if Y is number of bits in the signed shift type minus 1,
13998 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13999 31 might be 0xffffffff80000000. */
14000 if ((code == LT_EXPR || code == GE_EXPR)
14001 && TYPE_UNSIGNED (TREE_TYPE (arg0))
14002 && CONVERT_EXPR_P (arg1)
14003 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
14004 && (TYPE_PRECISION (TREE_TYPE (arg1))
14005 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
14006 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
14007 || (TYPE_PRECISION (TREE_TYPE (arg1))
14008 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
14009 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
14011 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
14012 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
14013 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
14014 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
14015 build_zero_cst (TREE_TYPE (arg0)));
14018 return NULL_TREE;
14020 case UNORDERED_EXPR:
14021 case ORDERED_EXPR:
14022 case UNLT_EXPR:
14023 case UNLE_EXPR:
14024 case UNGT_EXPR:
14025 case UNGE_EXPR:
14026 case UNEQ_EXPR:
14027 case LTGT_EXPR:
14028 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
14030 t1 = fold_relational_const (code, type, arg0, arg1);
14031 if (t1 != NULL_TREE)
14032 return t1;
14035 /* If the first operand is NaN, the result is constant. */
14036 if (TREE_CODE (arg0) == REAL_CST
14037 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
14038 && (code != LTGT_EXPR || ! flag_trapping_math))
14040 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14041 ? integer_zero_node
14042 : integer_one_node;
14043 return omit_one_operand_loc (loc, type, t1, arg1);
14046 /* If the second operand is NaN, the result is constant. */
14047 if (TREE_CODE (arg1) == REAL_CST
14048 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
14049 && (code != LTGT_EXPR || ! flag_trapping_math))
14051 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14052 ? integer_zero_node
14053 : integer_one_node;
14054 return omit_one_operand_loc (loc, type, t1, arg0);
14057 /* Simplify unordered comparison of something with itself. */
14058 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
14059 && operand_equal_p (arg0, arg1, 0))
14060 return constant_boolean_node (1, type);
14062 if (code == LTGT_EXPR
14063 && !flag_trapping_math
14064 && operand_equal_p (arg0, arg1, 0))
14065 return constant_boolean_node (0, type);
14067 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
14069 tree targ0 = strip_float_extensions (arg0);
14070 tree targ1 = strip_float_extensions (arg1);
14071 tree newtype = TREE_TYPE (targ0);
14073 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14074 newtype = TREE_TYPE (targ1);
14076 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14077 return fold_build2_loc (loc, code, type,
14078 fold_convert_loc (loc, newtype, targ0),
14079 fold_convert_loc (loc, newtype, targ1));
14082 return NULL_TREE;
14084 case COMPOUND_EXPR:
14085 /* When pedantic, a compound expression can be neither an lvalue
14086 nor an integer constant expression. */
14087 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14088 return NULL_TREE;
14089 /* Don't let (0, 0) be null pointer constant. */
14090 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14091 : fold_convert_loc (loc, type, arg1);
14092 return pedantic_non_lvalue_loc (loc, tem);
14094 case COMPLEX_EXPR:
14095 if ((TREE_CODE (arg0) == REAL_CST
14096 && TREE_CODE (arg1) == REAL_CST)
14097 || (TREE_CODE (arg0) == INTEGER_CST
14098 && TREE_CODE (arg1) == INTEGER_CST))
14099 return build_complex (type, arg0, arg1);
14100 if (TREE_CODE (arg0) == REALPART_EXPR
14101 && TREE_CODE (arg1) == IMAGPART_EXPR
14102 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14103 && operand_equal_p (TREE_OPERAND (arg0, 0),
14104 TREE_OPERAND (arg1, 0), 0))
14105 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14106 TREE_OPERAND (arg1, 0));
14107 return NULL_TREE;
14109 case ASSERT_EXPR:
14110 /* An ASSERT_EXPR should never be passed to fold_binary. */
14111 gcc_unreachable ();
14113 case VEC_PACK_TRUNC_EXPR:
14114 case VEC_PACK_FIX_TRUNC_EXPR:
14116 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14117 tree *elts;
14119 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14120 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14121 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14122 return NULL_TREE;
14124 elts = XALLOCAVEC (tree, nelts);
14125 if (!vec_cst_ctor_to_array (arg0, elts)
14126 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14127 return NULL_TREE;
14129 for (i = 0; i < nelts; i++)
14131 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14132 ? NOP_EXPR : FIX_TRUNC_EXPR,
14133 TREE_TYPE (type), elts[i]);
14134 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14135 return NULL_TREE;
14138 return build_vector (type, elts);
14141 case VEC_WIDEN_MULT_LO_EXPR:
14142 case VEC_WIDEN_MULT_HI_EXPR:
14143 case VEC_WIDEN_MULT_EVEN_EXPR:
14144 case VEC_WIDEN_MULT_ODD_EXPR:
14146 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14147 unsigned int out, ofs, scale;
14148 tree *elts;
14150 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14151 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14152 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14153 return NULL_TREE;
14155 elts = XALLOCAVEC (tree, nelts * 4);
14156 if (!vec_cst_ctor_to_array (arg0, elts)
14157 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14158 return NULL_TREE;
14160 if (code == VEC_WIDEN_MULT_LO_EXPR)
14161 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14162 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14163 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14164 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14165 scale = 1, ofs = 0;
14166 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14167 scale = 1, ofs = 1;
14169 for (out = 0; out < nelts; out++)
14171 unsigned int in1 = (out << scale) + ofs;
14172 unsigned int in2 = in1 + nelts * 2;
14173 tree t1, t2;
14175 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14176 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14178 if (t1 == NULL_TREE || t2 == NULL_TREE)
14179 return NULL_TREE;
14180 elts[out] = const_binop (MULT_EXPR, t1, t2);
14181 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14182 return NULL_TREE;
14185 return build_vector (type, elts);
14188 default:
14189 return NULL_TREE;
14190 } /* switch (code) */
14193 /* Fold a binary expression of code CODE and type TYPE with operands
14194 OP0 and OP1. Return the folded expression if folding is
14195 successful. Otherwise, return NULL_TREE.
14196 This is a wrapper around fold_binary_1 function (which does the
14197 actual folding). Set the EXPR_FOLDED flag of the folded expression
14198 if folding is successful. */
14199 tree
14200 fold_binary_loc (location_t loc,
14201 enum tree_code code, tree type, tree op0, tree op1)
14203 tree tem = fold_binary_loc_1 (loc, code, type, op0, op1);
14204 if (tem)
14205 set_expr_folded_flag (tem);
14206 return tem;
14209 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14210 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14211 of GOTO_EXPR. */
14213 static tree
14214 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14216 switch (TREE_CODE (*tp))
14218 case LABEL_EXPR:
14219 return *tp;
14221 case GOTO_EXPR:
14222 *walk_subtrees = 0;
14224 /* ... fall through ... */
14226 default:
14227 return NULL_TREE;
14231 /* Return whether the sub-tree ST contains a label which is accessible from
14232 outside the sub-tree. */
14234 static bool
14235 contains_label_p (tree st)
14237 return
14238 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
14241 /* Fold a ternary expression of code CODE and type TYPE with operands
14242 OP0, OP1, and OP2. Return the folded expression if folding is
14243 successful. Otherwise, return NULL_TREE. */
14245 static tree
14246 fold_ternary_loc_1 (location_t loc, enum tree_code code, tree type,
14247 tree op0, tree op1, tree op2)
14249 tree tem;
14250 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14251 enum tree_code_class kind = TREE_CODE_CLASS (code);
14253 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14254 && TREE_CODE_LENGTH (code) == 3);
14256 /* Strip any conversions that don't change the mode. This is safe
14257 for every expression, except for a comparison expression because
14258 its signedness is derived from its operands. So, in the latter
14259 case, only strip conversions that don't change the signedness.
14261 Note that this is done as an internal manipulation within the
14262 constant folder, in order to find the simplest representation of
14263 the arguments so that their form can be studied. In any cases,
14264 the appropriate type conversions should be put back in the tree
14265 that will get out of the constant folder. */
14266 if (op0)
14268 arg0 = op0;
14269 STRIP_NOPS (arg0);
14272 if (op1)
14274 arg1 = op1;
14275 STRIP_NOPS (arg1);
14278 if (op2)
14280 arg2 = op2;
14281 STRIP_NOPS (arg2);
14284 switch (code)
14286 case COMPONENT_REF:
14287 if (TREE_CODE (arg0) == CONSTRUCTOR
14288 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14290 unsigned HOST_WIDE_INT idx;
14291 tree field, value;
14292 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14293 if (field == arg1)
14294 return value;
14296 return NULL_TREE;
14298 case COND_EXPR:
14299 case VEC_COND_EXPR:
14300 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14301 so all simple results must be passed through pedantic_non_lvalue. */
14302 if (TREE_CODE (arg0) == INTEGER_CST)
14304 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14305 tem = integer_zerop (arg0) ? op2 : op1;
14306 /* Only optimize constant conditions when the selected branch
14307 has the same type as the COND_EXPR. This avoids optimizing
14308 away "c ? x : throw", where the throw has a void type.
14309 Avoid throwing away that operand which contains label. */
14310 if ((!TREE_SIDE_EFFECTS (unused_op)
14311 || !contains_label_p (unused_op))
14312 && (! VOID_TYPE_P (TREE_TYPE (tem))
14313 || VOID_TYPE_P (type)))
14314 return pedantic_non_lvalue_loc (loc, tem);
14315 return NULL_TREE;
14317 else if (TREE_CODE (arg0) == VECTOR_CST)
14319 if (integer_all_onesp (arg0))
14320 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14321 if (integer_zerop (arg0))
14322 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14324 if ((TREE_CODE (arg1) == VECTOR_CST
14325 || TREE_CODE (arg1) == CONSTRUCTOR)
14326 && (TREE_CODE (arg2) == VECTOR_CST
14327 || TREE_CODE (arg2) == CONSTRUCTOR))
14329 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14330 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14331 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14332 for (i = 0; i < nelts; i++)
14334 tree val = VECTOR_CST_ELT (arg0, i);
14335 if (integer_all_onesp (val))
14336 sel[i] = i;
14337 else if (integer_zerop (val))
14338 sel[i] = nelts + i;
14339 else /* Currently unreachable. */
14340 return NULL_TREE;
14342 tree t = fold_vec_perm (type, arg1, arg2, sel);
14343 if (t != NULL_TREE)
14344 return t;
14348 if (operand_equal_p (arg1, op2, 0))
14349 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14351 /* If we have A op B ? A : C, we may be able to convert this to a
14352 simpler expression, depending on the operation and the values
14353 of B and C. Signed zeros prevent all of these transformations,
14354 for reasons given above each one.
14356 Also try swapping the arguments and inverting the conditional. */
14357 if (COMPARISON_CLASS_P (arg0)
14358 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14359 arg1, TREE_OPERAND (arg0, 1))
14360 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14362 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14363 if (tem)
14364 return tem;
14367 if (COMPARISON_CLASS_P (arg0)
14368 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14369 op2,
14370 TREE_OPERAND (arg0, 1))
14371 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14373 location_t loc0 = expr_location_or (arg0, loc);
14374 tem = fold_invert_truthvalue (loc0, arg0);
14375 if (tem && COMPARISON_CLASS_P (tem))
14377 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14378 if (tem)
14379 return tem;
14383 /* If the second operand is simpler than the third, swap them
14384 since that produces better jump optimization results. */
14385 if (truth_value_p (TREE_CODE (arg0))
14386 && tree_swap_operands_p (op1, op2, false))
14388 location_t loc0 = expr_location_or (arg0, loc);
14389 /* See if this can be inverted. If it can't, possibly because
14390 it was a floating-point inequality comparison, don't do
14391 anything. */
14392 tem = fold_invert_truthvalue (loc0, arg0);
14393 if (tem)
14394 return fold_build3_loc (loc, code, type, tem, op2, op1);
14397 /* Convert A ? 1 : 0 to simply A. */
14398 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14399 : (integer_onep (op1)
14400 && !VECTOR_TYPE_P (type)))
14401 && integer_zerop (op2)
14402 /* If we try to convert OP0 to our type, the
14403 call to fold will try to move the conversion inside
14404 a COND, which will recurse. In that case, the COND_EXPR
14405 is probably the best choice, so leave it alone. */
14406 && type == TREE_TYPE (arg0))
14407 return pedantic_non_lvalue_loc (loc, arg0);
14409 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14410 over COND_EXPR in cases such as floating point comparisons. */
14411 if (integer_zerop (op1)
14412 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14413 : (integer_onep (op2)
14414 && !VECTOR_TYPE_P (type)))
14415 && truth_value_p (TREE_CODE (arg0)))
14416 return pedantic_non_lvalue_loc (loc,
14417 fold_convert_loc (loc, type,
14418 invert_truthvalue_loc (loc,
14419 arg0)));
14421 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14422 if (TREE_CODE (arg0) == LT_EXPR
14423 && integer_zerop (TREE_OPERAND (arg0, 1))
14424 && integer_zerop (op2)
14425 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14427 /* sign_bit_p looks through both zero and sign extensions,
14428 but for this optimization only sign extensions are
14429 usable. */
14430 tree tem2 = TREE_OPERAND (arg0, 0);
14431 while (tem != tem2)
14433 if (TREE_CODE (tem2) != NOP_EXPR
14434 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14436 tem = NULL_TREE;
14437 break;
14439 tem2 = TREE_OPERAND (tem2, 0);
14441 /* sign_bit_p only checks ARG1 bits within A's precision.
14442 If <sign bit of A> has wider type than A, bits outside
14443 of A's precision in <sign bit of A> need to be checked.
14444 If they are all 0, this optimization needs to be done
14445 in unsigned A's type, if they are all 1 in signed A's type,
14446 otherwise this can't be done. */
14447 if (tem
14448 && TYPE_PRECISION (TREE_TYPE (tem))
14449 < TYPE_PRECISION (TREE_TYPE (arg1))
14450 && TYPE_PRECISION (TREE_TYPE (tem))
14451 < TYPE_PRECISION (type))
14453 unsigned HOST_WIDE_INT mask_lo;
14454 HOST_WIDE_INT mask_hi;
14455 int inner_width, outer_width;
14456 tree tem_type;
14458 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14459 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14460 if (outer_width > TYPE_PRECISION (type))
14461 outer_width = TYPE_PRECISION (type);
14463 if (outer_width > HOST_BITS_PER_WIDE_INT)
14465 mask_hi = (HOST_WIDE_INT_M1U
14466 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14467 mask_lo = -1;
14469 else
14471 mask_hi = 0;
14472 mask_lo = (HOST_WIDE_INT_M1U
14473 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14475 if (inner_width > HOST_BITS_PER_WIDE_INT)
14477 mask_hi &= ~(HOST_WIDE_INT_M1U
14478 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14479 mask_lo = 0;
14481 else
14482 mask_lo &= ~(HOST_WIDE_INT_M1U
14483 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14485 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14486 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14488 tem_type = signed_type_for (TREE_TYPE (tem));
14489 tem = fold_convert_loc (loc, tem_type, tem);
14491 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14492 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14494 tem_type = unsigned_type_for (TREE_TYPE (tem));
14495 tem = fold_convert_loc (loc, tem_type, tem);
14497 else
14498 tem = NULL;
14501 if (tem)
14502 return
14503 fold_convert_loc (loc, type,
14504 fold_build2_loc (loc, BIT_AND_EXPR,
14505 TREE_TYPE (tem), tem,
14506 fold_convert_loc (loc,
14507 TREE_TYPE (tem),
14508 arg1)));
14511 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14512 already handled above. */
14513 if (TREE_CODE (arg0) == BIT_AND_EXPR
14514 && integer_onep (TREE_OPERAND (arg0, 1))
14515 && integer_zerop (op2)
14516 && integer_pow2p (arg1))
14518 tree tem = TREE_OPERAND (arg0, 0);
14519 STRIP_NOPS (tem);
14520 if (TREE_CODE (tem) == RSHIFT_EXPR
14521 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14522 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14523 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14524 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14525 TREE_OPERAND (tem, 0), arg1);
14528 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14529 is probably obsolete because the first operand should be a
14530 truth value (that's why we have the two cases above), but let's
14531 leave it in until we can confirm this for all front-ends. */
14532 if (integer_zerop (op2)
14533 && TREE_CODE (arg0) == NE_EXPR
14534 && integer_zerop (TREE_OPERAND (arg0, 1))
14535 && integer_pow2p (arg1)
14536 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14537 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14538 arg1, OEP_ONLY_CONST))
14539 return pedantic_non_lvalue_loc (loc,
14540 fold_convert_loc (loc, type,
14541 TREE_OPERAND (arg0, 0)));
14543 /* Disable the transformations below for vectors, since
14544 fold_binary_op_with_conditional_arg may undo them immediately,
14545 yielding an infinite loop. */
14546 if (code == VEC_COND_EXPR)
14547 return NULL_TREE;
14549 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14550 if (integer_zerop (op2)
14551 && truth_value_p (TREE_CODE (arg0))
14552 && truth_value_p (TREE_CODE (arg1))
14553 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14554 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14555 : TRUTH_ANDIF_EXPR,
14556 type, fold_convert_loc (loc, type, arg0), arg1);
14558 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14559 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14560 && truth_value_p (TREE_CODE (arg0))
14561 && truth_value_p (TREE_CODE (arg1))
14562 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14564 location_t loc0 = expr_location_or (arg0, loc);
14565 /* Only perform transformation if ARG0 is easily inverted. */
14566 tem = fold_invert_truthvalue (loc0, arg0);
14567 if (tem)
14568 return fold_build2_loc (loc, code == VEC_COND_EXPR
14569 ? BIT_IOR_EXPR
14570 : TRUTH_ORIF_EXPR,
14571 type, fold_convert_loc (loc, type, tem),
14572 arg1);
14575 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14576 if (integer_zerop (arg1)
14577 && truth_value_p (TREE_CODE (arg0))
14578 && truth_value_p (TREE_CODE (op2))
14579 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14581 location_t loc0 = expr_location_or (arg0, loc);
14582 /* Only perform transformation if ARG0 is easily inverted. */
14583 tem = fold_invert_truthvalue (loc0, arg0);
14584 if (tem)
14585 return fold_build2_loc (loc, code == VEC_COND_EXPR
14586 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14587 type, fold_convert_loc (loc, type, tem),
14588 op2);
14591 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14592 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14593 && truth_value_p (TREE_CODE (arg0))
14594 && truth_value_p (TREE_CODE (op2))
14595 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14596 return fold_build2_loc (loc, code == VEC_COND_EXPR
14597 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14598 type, fold_convert_loc (loc, type, arg0), op2);
14600 return NULL_TREE;
14602 case CALL_EXPR:
14603 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14604 of fold_ternary on them. */
14605 gcc_unreachable ();
14607 case BIT_FIELD_REF:
14608 if ((TREE_CODE (arg0) == VECTOR_CST
14609 || (TREE_CODE (arg0) == CONSTRUCTOR
14610 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14611 && (type == TREE_TYPE (TREE_TYPE (arg0))
14612 || (TREE_CODE (type) == VECTOR_TYPE
14613 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14615 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14616 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14617 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14618 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14620 if (n != 0
14621 && (idx % width) == 0
14622 && (n % width) == 0
14623 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14625 idx = idx / width;
14626 n = n / width;
14628 if (TREE_CODE (arg0) == VECTOR_CST)
14630 if (n == 1)
14631 return VECTOR_CST_ELT (arg0, idx);
14633 tree *vals = XALLOCAVEC (tree, n);
14634 for (unsigned i = 0; i < n; ++i)
14635 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14636 return build_vector (type, vals);
14639 /* Constructor elements can be subvectors. */
14640 unsigned HOST_WIDE_INT k = 1;
14641 if (CONSTRUCTOR_NELTS (arg0) != 0)
14643 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14644 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14645 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14648 /* We keep an exact subset of the constructor elements. */
14649 if ((idx % k) == 0 && (n % k) == 0)
14651 if (CONSTRUCTOR_NELTS (arg0) == 0)
14652 return build_constructor (type, NULL);
14653 idx /= k;
14654 n /= k;
14655 if (n == 1)
14657 if (idx < CONSTRUCTOR_NELTS (arg0))
14658 return CONSTRUCTOR_ELT (arg0, idx)->value;
14659 return build_zero_cst (type);
14662 vec<constructor_elt, va_gc> *vals;
14663 vec_alloc (vals, n);
14664 for (unsigned i = 0;
14665 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14666 ++i)
14667 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14668 CONSTRUCTOR_ELT
14669 (arg0, idx + i)->value);
14670 return build_constructor (type, vals);
14672 /* The bitfield references a single constructor element. */
14673 else if (idx + n <= (idx / k + 1) * k)
14675 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14676 return build_zero_cst (type);
14677 else if (n == k)
14678 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14679 else
14680 return fold_build3_loc (loc, code, type,
14681 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14682 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14687 /* A bit-field-ref that referenced the full argument can be stripped. */
14688 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14689 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14690 && integer_zerop (op2))
14691 return fold_convert_loc (loc, type, arg0);
14693 /* On constants we can use native encode/interpret to constant
14694 fold (nearly) all BIT_FIELD_REFs. */
14695 if (CONSTANT_CLASS_P (arg0)
14696 && can_native_interpret_type_p (type)
14697 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14698 /* This limitation should not be necessary, we just need to
14699 round this up to mode size. */
14700 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14701 /* Need bit-shifting of the buffer to relax the following. */
14702 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14704 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14705 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14706 unsigned HOST_WIDE_INT clen;
14707 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14708 /* ??? We cannot tell native_encode_expr to start at
14709 some random byte only. So limit us to a reasonable amount
14710 of work. */
14711 if (clen <= 4096)
14713 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14714 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14715 if (len > 0
14716 && len * BITS_PER_UNIT >= bitpos + bitsize)
14718 tree v = native_interpret_expr (type,
14719 b + bitpos / BITS_PER_UNIT,
14720 bitsize / BITS_PER_UNIT);
14721 if (v)
14722 return v;
14727 return NULL_TREE;
14729 case FMA_EXPR:
14730 /* For integers we can decompose the FMA if possible. */
14731 if (TREE_CODE (arg0) == INTEGER_CST
14732 && TREE_CODE (arg1) == INTEGER_CST)
14733 return fold_build2_loc (loc, PLUS_EXPR, type,
14734 const_binop (MULT_EXPR, arg0, arg1), arg2);
14735 if (integer_zerop (arg2))
14736 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14738 return fold_fma (loc, type, arg0, arg1, arg2);
14740 case VEC_PERM_EXPR:
14741 if (TREE_CODE (arg2) == VECTOR_CST)
14743 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14744 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14745 tree t;
14746 bool need_mask_canon = false;
14747 bool all_in_vec0 = true;
14748 bool all_in_vec1 = true;
14749 bool maybe_identity = true;
14750 bool single_arg = (op0 == op1);
14751 bool changed = false;
14753 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14754 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14755 for (i = 0; i < nelts; i++)
14757 tree val = VECTOR_CST_ELT (arg2, i);
14758 if (TREE_CODE (val) != INTEGER_CST)
14759 return NULL_TREE;
14761 sel[i] = TREE_INT_CST_LOW (val) & mask;
14762 if (TREE_INT_CST_HIGH (val)
14763 || ((unsigned HOST_WIDE_INT)
14764 TREE_INT_CST_LOW (val) != sel[i]))
14765 need_mask_canon = true;
14767 if (sel[i] < nelts)
14768 all_in_vec1 = false;
14769 else
14770 all_in_vec0 = false;
14772 if ((sel[i] & (nelts-1)) != i)
14773 maybe_identity = false;
14776 if (maybe_identity)
14778 if (all_in_vec0)
14779 return op0;
14780 if (all_in_vec1)
14781 return op1;
14784 if (all_in_vec0)
14785 op1 = op0;
14786 else if (all_in_vec1)
14788 op0 = op1;
14789 for (i = 0; i < nelts; i++)
14790 sel[i] -= nelts;
14791 need_mask_canon = true;
14794 if ((TREE_CODE (op0) == VECTOR_CST
14795 || TREE_CODE (op0) == CONSTRUCTOR)
14796 && (TREE_CODE (op1) == VECTOR_CST
14797 || TREE_CODE (op1) == CONSTRUCTOR))
14799 t = fold_vec_perm (type, op0, op1, sel);
14800 if (t != NULL_TREE)
14801 return t;
14804 if (op0 == op1 && !single_arg)
14805 changed = true;
14807 if (need_mask_canon && arg2 == op2)
14809 tree *tsel = XALLOCAVEC (tree, nelts);
14810 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14811 for (i = 0; i < nelts; i++)
14812 tsel[i] = build_int_cst (eltype, sel[i]);
14813 op2 = build_vector (TREE_TYPE (arg2), tsel);
14814 changed = true;
14817 if (changed)
14818 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14820 return NULL_TREE;
14822 default:
14823 return NULL_TREE;
14824 } /* switch (code) */
14827 /* Fold a ternary expression of code CODE and type TYPE with operands
14828 OP0, OP1, and OP2. Return the folded expression if folding is
14829 successful. Otherwise, return NULL_TREE.
14830 This is a wrapper around fold_ternary_1 function (which does the
14831 actual folding). Set the EXPR_FOLDED flag of the folded expression
14832 if folding is successful. */
14834 tree
14835 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14836 tree op0, tree op1, tree op2)
14838 tree tem = fold_ternary_loc_1 (loc, code, type, op0, op1, op2);
14839 if (tem)
14840 set_expr_folded_flag (tem);
14841 return tem;
14844 /* Perform constant folding and related simplification of EXPR.
14845 The related simplifications include x*1 => x, x*0 => 0, etc.,
14846 and application of the associative law.
14847 NOP_EXPR conversions may be removed freely (as long as we
14848 are careful not to change the type of the overall expression).
14849 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14850 but we can constant-fold them if they have constant operands. */
14852 #ifdef ENABLE_FOLD_CHECKING
14853 # define fold(x) fold_1 (x)
14854 static tree fold_1 (tree);
14855 static
14856 #endif
14857 tree
14858 fold (tree expr)
14860 const tree t = expr;
14861 enum tree_code code = TREE_CODE (t);
14862 enum tree_code_class kind = TREE_CODE_CLASS (code);
14863 tree tem;
14864 location_t loc = EXPR_LOCATION (expr);
14866 /* Return right away if a constant. */
14867 if (kind == tcc_constant)
14868 return t;
14870 /* CALL_EXPR-like objects with variable numbers of operands are
14871 treated specially. */
14872 if (kind == tcc_vl_exp)
14874 if (code == CALL_EXPR)
14876 tem = fold_call_expr (loc, expr, false);
14877 return tem ? tem : expr;
14879 return expr;
14882 if (IS_EXPR_CODE_CLASS (kind))
14884 tree type = TREE_TYPE (t);
14885 tree op0, op1, op2;
14887 switch (TREE_CODE_LENGTH (code))
14889 case 1:
14890 op0 = TREE_OPERAND (t, 0);
14891 tem = fold_unary_loc (loc, code, type, op0);
14892 return tem ? tem : expr;
14893 case 2:
14894 op0 = TREE_OPERAND (t, 0);
14895 op1 = TREE_OPERAND (t, 1);
14896 tem = fold_binary_loc (loc, code, type, op0, op1);
14897 return tem ? tem : expr;
14898 case 3:
14899 op0 = TREE_OPERAND (t, 0);
14900 op1 = TREE_OPERAND (t, 1);
14901 op2 = TREE_OPERAND (t, 2);
14902 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14903 return tem ? tem : expr;
14904 default:
14905 break;
14909 switch (code)
14911 case ARRAY_REF:
14913 tree op0 = TREE_OPERAND (t, 0);
14914 tree op1 = TREE_OPERAND (t, 1);
14916 if (TREE_CODE (op1) == INTEGER_CST
14917 && TREE_CODE (op0) == CONSTRUCTOR
14918 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14920 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14921 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14922 unsigned HOST_WIDE_INT begin = 0;
14924 /* Find a matching index by means of a binary search. */
14925 while (begin != end)
14927 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14928 tree index = (*elts)[middle].index;
14930 if (TREE_CODE (index) == INTEGER_CST
14931 && tree_int_cst_lt (index, op1))
14932 begin = middle + 1;
14933 else if (TREE_CODE (index) == INTEGER_CST
14934 && tree_int_cst_lt (op1, index))
14935 end = middle;
14936 else if (TREE_CODE (index) == RANGE_EXPR
14937 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14938 begin = middle + 1;
14939 else if (TREE_CODE (index) == RANGE_EXPR
14940 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14941 end = middle;
14942 else
14943 return (*elts)[middle].value;
14947 return t;
14950 /* Return a VECTOR_CST if possible. */
14951 case CONSTRUCTOR:
14953 tree type = TREE_TYPE (t);
14954 if (TREE_CODE (type) != VECTOR_TYPE)
14955 return t;
14957 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14958 unsigned HOST_WIDE_INT idx, pos = 0;
14959 tree value;
14961 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14963 if (!CONSTANT_CLASS_P (value))
14964 return t;
14965 if (TREE_CODE (value) == VECTOR_CST)
14967 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14968 vec[pos++] = VECTOR_CST_ELT (value, i);
14970 else
14971 vec[pos++] = value;
14973 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14974 vec[pos] = build_zero_cst (TREE_TYPE (type));
14976 return build_vector (type, vec);
14979 case CONST_DECL:
14980 return fold (DECL_INITIAL (t));
14982 default:
14983 return t;
14984 } /* switch (code) */
14987 #ifdef ENABLE_FOLD_CHECKING
14988 #undef fold
14990 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14991 hash_table <pointer_hash <tree_node> >);
14992 static void fold_check_failed (const_tree, const_tree);
14993 void print_fold_checksum (const_tree);
14995 /* When --enable-checking=fold, compute a digest of expr before
14996 and after actual fold call to see if fold did not accidentally
14997 change original expr. */
14999 tree
15000 fold (tree expr)
15002 tree ret;
15003 struct md5_ctx ctx;
15004 unsigned char checksum_before[16], checksum_after[16];
15005 hash_table <pointer_hash <tree_node> > ht;
15007 ht.create (32);
15008 md5_init_ctx (&ctx);
15009 fold_checksum_tree (expr, &ctx, ht);
15010 md5_finish_ctx (&ctx, checksum_before);
15011 ht.empty ();
15013 ret = fold_1 (expr);
15015 md5_init_ctx (&ctx);
15016 fold_checksum_tree (expr, &ctx, ht);
15017 md5_finish_ctx (&ctx, checksum_after);
15018 ht.dispose ();
15020 if (memcmp (checksum_before, checksum_after, 16))
15021 fold_check_failed (expr, ret);
15023 return ret;
15026 void
15027 print_fold_checksum (const_tree expr)
15029 struct md5_ctx ctx;
15030 unsigned char checksum[16], cnt;
15031 hash_table <pointer_hash <tree_node> > ht;
15033 ht.create (32);
15034 md5_init_ctx (&ctx);
15035 fold_checksum_tree (expr, &ctx, ht);
15036 md5_finish_ctx (&ctx, checksum);
15037 ht.dispose ();
15038 for (cnt = 0; cnt < 16; ++cnt)
15039 fprintf (stderr, "%02x", checksum[cnt]);
15040 putc ('\n', stderr);
/* Report that the before/after checksums of a fold operand differ,
   i.e. fold modified a tree it was only supposed to read.  This is
   always an internal compiler error.  */

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
/* Fold an md5 digest of the tree EXPR (and everything reachable from
   it) into CTX.  HT records already-visited nodes so shared subtrees
   and cycles are hashed only once.  Fields that fold is allowed to
   modify (DECL_ASSEMBLER_NAME, type caches, variant chains) are
   masked out by hashing a scrubbed stack copy of the node instead of
   the node itself.  */

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;	/* Scratch copy used to neutralize mutable fields.  */
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  /* Hash each node at most once; a pre-existing slot means we have
     already processed this node on the current walk.  */
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  /* Hash the raw node bytes, then recurse into referenced trees.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Walk the chain iteratively to avoid deep recursion on
	     long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
15199 /* Helper function for outputting the checksum of a tree T. When
15200 debugging with gdb, you can "define mynext" to be "next" followed
15201 by "call debug_fold_checksum (op0)", then just trace down till the
15202 outputs differ. */
15204 DEBUG_FUNCTION void
15205 debug_fold_checksum (const_tree t)
15207 int i;
15208 unsigned char checksum[16];
15209 struct md5_ctx ctx;
15210 hash_table <pointer_hash <tree_node> > ht;
15211 ht.create (32);
15213 md5_init_ctx (&ctx);
15214 fold_checksum_tree (t, &ctx, ht);
15215 md5_finish_ctx (&ctx, checksum);
15216 ht.empty ();
15218 for (i = 0; i < 16; i++)
15219 fprintf (stderr, "%d ", checksum[i]);
15221 fprintf (stderr, "\n");
15224 #endif
15226 /* Fold a unary tree expression with code CODE of type TYPE with an
15227 operand OP0. LOC is the location of the resulting expression.
15228 Return a folded expression if successful. Otherwise, return a tree
15229 expression with code CODE of type TYPE with an operand OP0. */
15231 tree
15232 fold_build1_stat_loc (location_t loc,
15233 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15235 tree tem;
15236 #ifdef ENABLE_FOLD_CHECKING
15237 unsigned char checksum_before[16], checksum_after[16];
15238 struct md5_ctx ctx;
15239 hash_table <pointer_hash <tree_node> > ht;
15241 ht.create (32);
15242 md5_init_ctx (&ctx);
15243 fold_checksum_tree (op0, &ctx, ht);
15244 md5_finish_ctx (&ctx, checksum_before);
15245 ht.empty ();
15246 #endif
15248 tem = fold_unary_loc (loc, code, type, op0);
15249 if (!tem)
15250 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15252 #ifdef ENABLE_FOLD_CHECKING
15253 md5_init_ctx (&ctx);
15254 fold_checksum_tree (op0, &ctx, ht);
15255 md5_finish_ctx (&ctx, checksum_after);
15256 ht.dispose ();
15258 if (memcmp (checksum_before, checksum_after, 16))
15259 fold_check_failed (op0, tem);
15260 #endif
15261 return tem;
15264 /* Fold a binary tree expression with code CODE of type TYPE with
15265 operands OP0 and OP1. LOC is the location of the resulting
15266 expression. Return a folded expression if successful. Otherwise,
15267 return a tree expression with code CODE of type TYPE with operands
15268 OP0 and OP1. */
15270 tree
15271 fold_build2_stat_loc (location_t loc,
15272 enum tree_code code, tree type, tree op0, tree op1
15273 MEM_STAT_DECL)
15275 tree tem;
15276 #ifdef ENABLE_FOLD_CHECKING
15277 unsigned char checksum_before_op0[16],
15278 checksum_before_op1[16],
15279 checksum_after_op0[16],
15280 checksum_after_op1[16];
15281 struct md5_ctx ctx;
15282 hash_table <pointer_hash <tree_node> > ht;
15284 ht.create (32);
15285 md5_init_ctx (&ctx);
15286 fold_checksum_tree (op0, &ctx, ht);
15287 md5_finish_ctx (&ctx, checksum_before_op0);
15288 ht.empty ();
15290 md5_init_ctx (&ctx);
15291 fold_checksum_tree (op1, &ctx, ht);
15292 md5_finish_ctx (&ctx, checksum_before_op1);
15293 ht.empty ();
15294 #endif
15296 tem = fold_binary_loc (loc, code, type, op0, op1);
15297 if (!tem)
15298 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15300 #ifdef ENABLE_FOLD_CHECKING
15301 md5_init_ctx (&ctx);
15302 fold_checksum_tree (op0, &ctx, ht);
15303 md5_finish_ctx (&ctx, checksum_after_op0);
15304 ht.empty ();
15306 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15307 fold_check_failed (op0, tem);
15309 md5_init_ctx (&ctx);
15310 fold_checksum_tree (op1, &ctx, ht);
15311 md5_finish_ctx (&ctx, checksum_after_op1);
15312 ht.dispose ();
15314 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15315 fold_check_failed (op1, tem);
15316 #endif
15317 return tem;
15320 /* Fold a ternary tree expression with code CODE of type TYPE with
15321 operands OP0, OP1, and OP2. Return a folded expression if
15322 successful. Otherwise, return a tree expression with code CODE of
15323 type TYPE with operands OP0, OP1, and OP2. */
15325 tree
15326 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15327 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15329 tree tem;
15330 #ifdef ENABLE_FOLD_CHECKING
15331 unsigned char checksum_before_op0[16],
15332 checksum_before_op1[16],
15333 checksum_before_op2[16],
15334 checksum_after_op0[16],
15335 checksum_after_op1[16],
15336 checksum_after_op2[16];
15337 struct md5_ctx ctx;
15338 hash_table <pointer_hash <tree_node> > ht;
15340 ht.create (32);
15341 md5_init_ctx (&ctx);
15342 fold_checksum_tree (op0, &ctx, ht);
15343 md5_finish_ctx (&ctx, checksum_before_op0);
15344 ht.empty ();
15346 md5_init_ctx (&ctx);
15347 fold_checksum_tree (op1, &ctx, ht);
15348 md5_finish_ctx (&ctx, checksum_before_op1);
15349 ht.empty ();
15351 md5_init_ctx (&ctx);
15352 fold_checksum_tree (op2, &ctx, ht);
15353 md5_finish_ctx (&ctx, checksum_before_op2);
15354 ht.empty ();
15355 #endif
15357 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15358 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15359 if (!tem)
15360 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15362 #ifdef ENABLE_FOLD_CHECKING
15363 md5_init_ctx (&ctx);
15364 fold_checksum_tree (op0, &ctx, ht);
15365 md5_finish_ctx (&ctx, checksum_after_op0);
15366 ht.empty ();
15368 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15369 fold_check_failed (op0, tem);
15371 md5_init_ctx (&ctx);
15372 fold_checksum_tree (op1, &ctx, ht);
15373 md5_finish_ctx (&ctx, checksum_after_op1);
15374 ht.empty ();
15376 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15377 fold_check_failed (op1, tem);
15379 md5_init_ctx (&ctx);
15380 fold_checksum_tree (op2, &ctx, ht);
15381 md5_finish_ctx (&ctx, checksum_after_op2);
15382 ht.dispose ();
15384 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15385 fold_check_failed (op2, tem);
15386 #endif
15387 return tem;
15390 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15391 arguments in ARGARRAY, and a null static chain.
15392 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15393 of type TYPE from the given operands as constructed by build_call_array. */
15395 tree
15396 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15397 int nargs, tree *argarray)
15399 tree tem;
15400 #ifdef ENABLE_FOLD_CHECKING
15401 unsigned char checksum_before_fn[16],
15402 checksum_before_arglist[16],
15403 checksum_after_fn[16],
15404 checksum_after_arglist[16];
15405 struct md5_ctx ctx;
15406 hash_table <pointer_hash <tree_node> > ht;
15407 int i;
15409 ht.create (32);
15410 md5_init_ctx (&ctx);
15411 fold_checksum_tree (fn, &ctx, ht);
15412 md5_finish_ctx (&ctx, checksum_before_fn);
15413 ht.empty ();
15415 md5_init_ctx (&ctx);
15416 for (i = 0; i < nargs; i++)
15417 fold_checksum_tree (argarray[i], &ctx, ht);
15418 md5_finish_ctx (&ctx, checksum_before_arglist);
15419 ht.empty ();
15420 #endif
15422 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15424 #ifdef ENABLE_FOLD_CHECKING
15425 md5_init_ctx (&ctx);
15426 fold_checksum_tree (fn, &ctx, ht);
15427 md5_finish_ctx (&ctx, checksum_after_fn);
15428 ht.empty ();
15430 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15431 fold_check_failed (fn, tem);
15433 md5_init_ctx (&ctx);
15434 for (i = 0; i < nargs; i++)
15435 fold_checksum_tree (argarray[i], &ctx, ht);
15436 md5_finish_ctx (&ctx, checksum_after_arglist);
15437 ht.dispose ();
15439 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15440 fold_check_failed (NULL_TREE, tem);
15441 #endif
15442 return tem;
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save the trap/rounding-related global flags, then clear them and set
   folding_initializer, so fold may apply transformations that would be
   unsafe for run-time-evaluated expressions.  Pairs with END_FOLD_INIT.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the global flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
15468 tree
15469 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15470 tree type, tree op)
15472 tree result;
15473 START_FOLD_INIT;
15475 result = fold_build1_loc (loc, code, type, op);
15477 END_FOLD_INIT;
15478 return result;
15481 tree
15482 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15483 tree type, tree op0, tree op1)
15485 tree result;
15486 START_FOLD_INIT;
15488 result = fold_build2_loc (loc, code, type, op0, op1);
15490 END_FOLD_INIT;
15491 return result;
15494 tree
15495 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15496 int nargs, tree *argarray)
15498 tree result;
15499 START_FOLD_INIT;
15501 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15503 END_FOLD_INIT;
15504 return result;
15507 #undef START_FOLD_INIT
15508 #undef END_FOLD_INIT
15510 /* Determine if first argument is a multiple of second argument. Return 0 if
15511 it is not, or we cannot easily determined it to be.
15513 An example of the sort of thing we care about (at this point; this routine
15514 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15515 fold cases do now) is discovering that
15517 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15519 is a multiple of
15521 SAVE_EXPR (J * 8)
15523 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15525 This code also handles discovering that
15527 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15529 is a multiple of 8 so we don't have to worry about dealing with a
15530 possible remainder.
15532 Note that we *look* inside a SAVE_EXPR only to determine how it was
15533 calculated; it is not safe for fold to do much of anything else with the
15534 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15535 at run time. For example, the latter example above *cannot* be implemented
15536 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15537 evaluation time of the original SAVE_EXPR is not necessarily the same at
15538 the time the new expression is evaluated. The only optimization of this
15539 sort that would be valid is changing
15541 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15543 divided by 8 to
15545 SAVE_EXPR (I) * SAVE_EXPR (J)
15547 (where the same SAVE_EXPR (J) is used in the original and the
15548 transformed version). */
15551 multiple_of_p (tree type, const_tree top, const_tree bottom)
15553 if (operand_equal_p (top, bottom, 0))
15554 return 1;
15556 if (TREE_CODE (type) != INTEGER_TYPE)
15557 return 0;
15559 switch (TREE_CODE (top))
15561 case BIT_AND_EXPR:
15562 /* Bitwise and provides a power of two multiple. If the mask is
15563 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15564 if (!integer_pow2p (bottom))
15565 return 0;
15566 /* FALLTHRU */
15568 case MULT_EXPR:
15569 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15570 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15572 case PLUS_EXPR:
15573 case MINUS_EXPR:
15574 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15575 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15577 case LSHIFT_EXPR:
15578 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15580 tree op1, t1;
15582 op1 = TREE_OPERAND (top, 1);
15583 /* const_binop may not detect overflow correctly,
15584 so check for it explicitly here. */
15585 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15586 > TREE_INT_CST_LOW (op1)
15587 && TREE_INT_CST_HIGH (op1) == 0
15588 && 0 != (t1 = fold_convert (type,
15589 const_binop (LSHIFT_EXPR,
15590 size_one_node,
15591 op1)))
15592 && !TREE_OVERFLOW (t1))
15593 return multiple_of_p (type, t1, bottom);
15595 return 0;
15597 case NOP_EXPR:
15598 /* Can't handle conversions from non-integral or wider integral type. */
15599 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15600 || (TYPE_PRECISION (type)
15601 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15602 return 0;
15604 /* .. fall through ... */
15606 case SAVE_EXPR:
15607 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15609 case COND_EXPR:
15610 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15611 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15613 case INTEGER_CST:
15614 if (TREE_CODE (bottom) != INTEGER_CST
15615 || integer_zerop (bottom)
15616 || (TYPE_UNSIGNED (type)
15617 && (tree_int_cst_sgn (top) < 0
15618 || tree_int_cst_sgn (bottom) < 0)))
15619 return 0;
15620 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15621 top, bottom));
15623 default:
15624 return 0;
15628 /* Return true if CODE or TYPE is known to be non-negative. */
15630 static bool
15631 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15633 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15634 && truth_value_p (code))
15635 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15636 have a signed:1 type (where the value is -1 and 0). */
15637 return true;
15638 return false;
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  /* Any value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* The answer relies on INT_MIN never occurring, i.e. on signed
	     overflow being undefined; record that assumption.  */
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* These conversions preserve the sign of the operand.  */
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    /* real -> real conversion keeps the sign.  */
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    /* int -> real: an unsigned source is trivially nonnegative;
	       otherwise recurse on the operand.  */
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    /* real -> int keeps the sign of the operand.  */
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    /* int -> int is known nonnegative only for a strictly
	       widening conversion from an unsigned type, i.e. a zero
	       extension.  */
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  /* Any value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* For floats there is no wrap-around, so the sum of two
	 nonnegative values is nonnegative.  */
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      /* prec bits are enough to hold the sum without overflow
		 into the sign bit of TYPE.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      /* For integral TYPE this relies on overflow being
		 undefined; record the assumption.  */
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  /* A nonnegative constant behaves like an unsigned operand.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      /* For constants use the minimum number of bits that can
		 represent the value; for other operands use the type's
		 precision.  */
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      /* The product fits in precision0 + precision1 bits, so it
		 cannot reach the sign bit of TYPE.  */
	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* AND clears bits, MAX picks the larger: either way one
	 nonnegative operand is enough.  */
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* These are nonnegative when both operands are.  */
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* The sign of a remainder follows the first operand.  */
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
15838 /* Return true if T is known to be non-negative. If the return
15839 value is based on the assumption that signed overflow is undefined,
15840 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15841 *STRICT_OVERFLOW_P. */
15843 bool
15844 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15846 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15847 return true;
15849 switch (TREE_CODE (t))
15851 case INTEGER_CST:
15852 return tree_int_cst_sgn (t) >= 0;
15854 case REAL_CST:
15855 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15857 case FIXED_CST:
15858 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15860 case COND_EXPR:
15861 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15862 strict_overflow_p)
15863 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15864 strict_overflow_p));
15865 default:
15866 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15867 TREE_TYPE (t));
15869 /* We don't know sign of `t', so be conservative and return false. */
15870 return false;
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	/* Builtins whose mathematical range is inherently nonnegative.  */
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	/* Builtins that preserve the sign of their first argument.  */
	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		/* Check that the exponent is exactly the integer N,
		   i.e. it was not rounded during the conversion.  */
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);

  /* Values of an unsigned type are nonnegative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	/* If the walk ended at an assignment to the slot, the slot's
	   value is the RHS of that assignment.  */
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	/* Only the first two arguments matter for the builtins handled
	   by tree_call_nonnegative_warnv_p.  */
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value of a compound expression or assignment is its second
	 operand.  */
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      /* The value of a BIND_EXPR is the last expression of its body.  */
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.

   This is the main entry point; it dispatches on the tree-code class
   of T to the unary/binary/single-operand helpers above.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);

  /* First dispatch on the code class.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  /* Codes in class tcc_expression and friends need case-by-case
     routing to the right helper.  */
  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
16155 /* Return true if `t' is known to be non-negative. Handle warnings
16156 about undefined signed overflow. */
16158 bool
16159 tree_expr_nonnegative_p (tree t)
16161 bool ret, strict_overflow_p;
16163 strict_overflow_p = false;
16164 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16165 if (strict_overflow_p)
16166 fold_overflow_warning (("assuming signed overflow does not occur when "
16167 "determining that expression is always "
16168 "non-negative"),
16169 WARN_STRICT_OVERFLOW_MISC);
16170 return ret;
16174 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16175 For floating point we further ensure that T is not denormal.
16176 Similar logic is present in nonzero_address in rtlanal.h.
16178 If the return value is based on the assumption that signed overflow
16179 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16180 change *STRICT_OVERFLOW_P. */
16182 bool
16183 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16184 bool *strict_overflow_p)
16186 switch (code)
16188 case ABS_EXPR:
16189 return tree_expr_nonzero_warnv_p (op0,
16190 strict_overflow_p);
16192 case NOP_EXPR:
16194 tree inner_type = TREE_TYPE (op0);
16195 tree outer_type = type;
16197 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16198 && tree_expr_nonzero_warnv_p (op0,
16199 strict_overflow_p));
16201 break;
16203 case NON_LVALUE_EXPR:
16204 return tree_expr_nonzero_warnv_p (op0,
16205 strict_overflow_p);
16207 default:
16208 break;
16211 return false;
16214 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16215 For floating point we further ensure that T is not denormal.
16216 Similar logic is present in nonzero_address in rtlanal.h.
16218 If the return value is based on the assumption that signed overflow
16219 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16220 change *STRICT_OVERFLOW_P. */
16222 bool
16223 tree_binary_nonzero_warnv_p (enum tree_code code,
16224 tree type,
16225 tree op0,
16226 tree op1, bool *strict_overflow_p)
16228 bool sub_strict_overflow_p;
16229 switch (code)
16231 case POINTER_PLUS_EXPR:
16232 case PLUS_EXPR:
16233 if (TYPE_OVERFLOW_UNDEFINED (type))
16235 /* With the presence of negative values it is hard
16236 to say something. */
16237 sub_strict_overflow_p = false;
16238 if (!tree_expr_nonnegative_warnv_p (op0,
16239 &sub_strict_overflow_p)
16240 || !tree_expr_nonnegative_warnv_p (op1,
16241 &sub_strict_overflow_p))
16242 return false;
16243 /* One of operands must be positive and the other non-negative. */
16244 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16245 overflows, on a twos-complement machine the sum of two
16246 nonnegative numbers can never be zero. */
16247 return (tree_expr_nonzero_warnv_p (op0,
16248 strict_overflow_p)
16249 || tree_expr_nonzero_warnv_p (op1,
16250 strict_overflow_p));
16252 break;
16254 case MULT_EXPR:
16255 if (TYPE_OVERFLOW_UNDEFINED (type))
16257 if (tree_expr_nonzero_warnv_p (op0,
16258 strict_overflow_p)
16259 && tree_expr_nonzero_warnv_p (op1,
16260 strict_overflow_p))
16262 *strict_overflow_p = true;
16263 return true;
16266 break;
16268 case MIN_EXPR:
16269 sub_strict_overflow_p = false;
16270 if (tree_expr_nonzero_warnv_p (op0,
16271 &sub_strict_overflow_p)
16272 && tree_expr_nonzero_warnv_p (op1,
16273 &sub_strict_overflow_p))
16275 if (sub_strict_overflow_p)
16276 *strict_overflow_p = true;
16278 break;
16280 case MAX_EXPR:
16281 sub_strict_overflow_p = false;
16282 if (tree_expr_nonzero_warnv_p (op0,
16283 &sub_strict_overflow_p))
16285 if (sub_strict_overflow_p)
16286 *strict_overflow_p = true;
16288 /* When both operands are nonzero, then MAX must be too. */
16289 if (tree_expr_nonzero_warnv_p (op1,
16290 strict_overflow_p))
16291 return true;
16293 /* MAX where operand 0 is positive is positive. */
16294 return tree_expr_nonnegative_warnv_p (op0,
16295 strict_overflow_p);
16297 /* MAX where operand 1 is positive is positive. */
16298 else if (tree_expr_nonzero_warnv_p (op1,
16299 &sub_strict_overflow_p)
16300 && tree_expr_nonnegative_warnv_p (op1,
16301 &sub_strict_overflow_p))
16303 if (sub_strict_overflow_p)
16304 *strict_overflow_p = true;
16305 return true;
16307 break;
16309 case BIT_IOR_EXPR:
16310 return (tree_expr_nonzero_warnv_p (op1,
16311 strict_overflow_p)
16312 || tree_expr_nonzero_warnv_p (op0,
16313 strict_overflow_p));
16315 default:
16316 break;
16319 return false;
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	/* No identifiable base object: can't prove anything.  */
	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      /* A conditional is nonzero when both of its arms are.  */
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
16384 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16385 attempt to fold the expression to a constant without modifying TYPE,
16386 OP0 or OP1.
16388 If the expression could be simplified to a constant, then return
16389 the constant. If the expression would not be simplified to a
16390 constant, then return NULL_TREE. */
16392 tree
16393 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16395 tree tem = fold_binary (code, type, op0, op1);
16396 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16399 /* Given the components of a unary expression CODE, TYPE and OP0,
16400 attempt to fold the expression to a constant without modifying
16401 TYPE or OP0.
16403 If the expression could be simplified to a constant, then return
16404 the constant. If the expression would not be simplified to a
16405 constant, then return NULL_TREE. */
16407 tree
16408 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16410 tree tem = fold_unary (code, type, op0);
16411 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      /* Only fold when the access is a known in-bounds constant index
	 into a single-byte-element string whose element mode matches
	 the access mode.  */
      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	/* Only flag overflow for signed results; negating e.g. the
	   minimum value of an unsigned type wraps by definition.  */
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    /* abs (INT_MIN) overflows; keep the flag sticky from ARG0.  */
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t =  arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
16561 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16562 constant. TYPE is the type of the result. */
16564 static tree
16565 fold_not_const (const_tree arg0, tree type)
16567 double_int val;
16569 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16571 val = ~tree_to_double_int (arg0);
16572 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    /* NaN compares unordered: ordered comparisons are false.  */
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    /* ... and the unordered comparisons are true.  */
	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      /* These raise an invalid-operation exception on NaN, so
		 they cannot be folded when traps matter.  */
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* Vector constants are compared elementwise; each result element is
     all-ones for true and zero for false.  */
  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
16728 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16729 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16730 itself. */
16732 tree
16733 fold_build_cleanup_point_expr (tree type, tree expr)
16735 /* If the expression does not have side effects then we don't have to wrap
16736 it with a cleanup point expression. */
16737 if (!TREE_SIDE_EFFECTS (expr))
16738 return expr;
16740 /* If the expression is a return, check to see if the expression inside the
16741 return has no side effects or the right hand side of the modify expression
16742 inside the return. If either don't have side effects set we don't need to
16743 wrap the expression in a cleanup point expression. Note we don't check the
16744 left hand side of the modify because it should always be a return decl. */
16745 if (TREE_CODE (expr) == RETURN_EXPR)
16747 tree op = TREE_OPERAND (expr, 0);
16748 if (!op || !TREE_SIDE_EFFECTS (op))
16749 return expr;
16750 op = TREE_OPERAND (op, 1);
16751 if (!TREE_SIDE_EFFECTS (op))
16752 return expr;
16755 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  /* Look through no-op conversions; only pointer-typed operands can be
     dereferenced.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* In GIMPLE form, ARRAY_REF indices must be constants.  */
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  /* Extract element 0: width is the element size, index 0 bits.  */
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      /* Byte offset OP01 translated into a bit index; only fold
		 when it selects an element inside the vector.  */
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);

	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      /* The imaginary part lives exactly one element size in.  */
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      /* Convert the byte offset into an array index, rebased to
		 the domain's lower bound.  */
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      /* As above, GIMPLE requires constant indices.  */
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
16895 /* Builds an expression for an indirection through T, simplifying some
16896 cases. */
16898 tree
16899 build_fold_indirect_ref_loc (location_t loc, tree t)
16901 tree type = TREE_TYPE (TREE_TYPE (t));
16902 tree sub = fold_indirect_ref_1 (loc, type, t);
16904 if (sub)
16905 return sub;
16907 return build1_loc (loc, INDIRECT_REF, type, t);
16910 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16912 tree
16913 fold_indirect_ref_loc (location_t loc, tree t)
16915 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16917 if (sub)
16918 return sub;
16919 else
16920 return t;
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* A side-effect-free expression can be dropped entirely.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Repeatedly peel away outer nodes whose value is discardable, until
     only side-effecting parts remain.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	/* The unary operation itself has no effect; descend.  */
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	/* Keep whichever operand carries side effects; if both do, the
	   whole node must be preserved.  */
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    /* The second operand is the value; if it also has side
	       effects the whole compound must stay.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    /* Only strippable when both arms are side-effect free; then
	       only the condition matters.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  /* Already a multiple of DIVISOR: nothing to do.  */
	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  /* Round the low word up; propagate the carry into the high
	     word manually, flagging overflow if the carry wraps the
	     high word too.  */
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  /* Compute (VALUE + (DIVISOR - 1)) & -DIVISOR.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* General divisor: ceiling-divide then multiply back up.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
17045 /* Likewise, but round down. */
17047 tree
17048 round_down_loc (location_t loc, tree value, int divisor)
17050 tree div = NULL_TREE;
17052 gcc_assert (divisor > 0);
17053 if (divisor == 1)
17054 return value;
17056 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
17057 have to do anything. Only do this when we are not given a const,
17058 because in that case, this check is more expensive than just
17059 doing it. */
17060 if (TREE_CODE (value) != INTEGER_CST)
17062 div = build_int_cst (TREE_TYPE (value), divisor);
17064 if (multiple_of_p (TREE_TYPE (value), value, div))
17065 return value;
17068 /* If divisor is a power of two, simplify this to bit manipulation. */
17069 if (divisor == (divisor & -divisor))
17071 tree t;
17073 t = build_int_cst (TREE_TYPE (value), -divisor);
17074 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
17076 else
17078 if (!div)
17079 div = build_int_cst (TREE_TYPE (value), divisor);
17080 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
17081 value = size_binop_loc (loc, MULT_EXPR, value, div);
17084 return value;
17087 /* Returns the pointer to the base of the object addressed by EXP and
17088 extracts the information about the offset of the access, storing it
17089 to PBITPOS and POFFSET. */
17091 static tree
17092 split_address_to_core_and_offset (tree exp,
17093 HOST_WIDE_INT *pbitpos, tree *poffset)
17095 tree core;
17096 enum machine_mode mode;
17097 int unsignedp, volatilep;
17098 HOST_WIDE_INT bitsize;
17099 location_t loc = EXPR_LOCATION (exp);
17101 if (TREE_CODE (exp) == ADDR_EXPR)
17103 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
17104 poffset, &mode, &unsignedp, &volatilep,
17105 false);
17106 core = build_fold_addr_expr_loc (loc, core);
17108 else
17110 core = exp;
17111 *pbitpos = 0;
17112 *poffset = NULL_TREE;
17115 return core;
17118 /* Returns true if addresses of E1 and E2 differ by a constant, false
17119 otherwise. If they do, E1 - E2 is stored in *DIFF. */
17121 bool
17122 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17124 tree core1, core2;
17125 HOST_WIDE_INT bitpos1, bitpos2;
17126 tree toffset1, toffset2, tdiff, type;
17128 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17129 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17131 if (bitpos1 % BITS_PER_UNIT != 0
17132 || bitpos2 % BITS_PER_UNIT != 0
17133 || !operand_equal_p (core1, core2, 0))
17134 return false;
17136 if (toffset1 && toffset2)
17138 type = TREE_TYPE (toffset1);
17139 if (type != TREE_TYPE (toffset2))
17140 toffset2 = fold_convert (type, toffset2);
17142 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17143 if (!cst_and_fits_in_hwi (tdiff))
17144 return false;
17146 *diff = int_cst_value (tdiff);
17148 else if (toffset1 || toffset2)
17150 /* If only one of the offsets is non-constant, the difference cannot
17151 be a constant. */
17152 return false;
17154 else
17155 *diff = 0;
17157 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17158 return true;
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* The sign operation itself can be dropped; recurse first in case
	 the operand simplifies further.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding, changing operand signs could
	 change the magnitude of the result, so punt.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      /* Strip signs from both operands; rebuild only if one changed.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      /* Only the second operand supplies the value; the first is kept
	 for its side effects.  */
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      /* Strip signs in both arms; the condition stays as-is.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}