Merge from trunk: 215733-215743
[official-gcc.git] / gcc-4_9 / gcc / fold-const.c
blob1f0d1988df6c5811790daebd1d0e9415b8fe570c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
72 /* Nonzero if we are folding constants inside an initializer; zero
73 otherwise. */
74 int folding_initializer = 0;
76 /* The following constants represent a bit based encoding of GCC's
77 comparison operators. This encoding simplifies transformations
78 on relational comparison operators, such as AND and OR. */
79 enum comparison_code {
80 COMPCODE_FALSE = 0,
81 COMPCODE_LT = 1,
82 COMPCODE_EQ = 2,
83 COMPCODE_LE = 3,
84 COMPCODE_GT = 4,
85 COMPCODE_LTGT = 5,
86 COMPCODE_GE = 6,
87 COMPCODE_ORD = 7,
88 COMPCODE_UNORD = 8,
89 COMPCODE_UNLT = 9,
90 COMPCODE_UNEQ = 10,
91 COMPCODE_UNLE = 11,
92 COMPCODE_UNGT = 12,
93 COMPCODE_NE = 13,
94 COMPCODE_UNGE = 14,
95 COMPCODE_TRUE = 15
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
114 tree, tree, tree);
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116 HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
118 tree *, tree *);
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree optimize_minmax_comparison (location_t, enum tree_code,
130 tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (location_t,
134 enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static tree fold_mathfn_compare (location_t,
138 enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149 Otherwise, return LOC. */
151 static location_t
152 expr_location_or (tree t, location_t loc)
154 location_t tloc = EXPR_LOCATION (t);
155 return tloc == UNKNOWN_LOCATION ? loc : tloc;
158 /* Similar to protected_set_expr_location, but never modify x in place,
159 if location can and needs to be set, unshare it. */
161 static inline tree
162 protected_set_expr_location_unshare (tree x, location_t loc)
164 if (CAN_HAVE_LOCATION_P (x)
165 && EXPR_LOCATION (x) != loc
166 && !(TREE_CODE (x) == SAVE_EXPR
167 || TREE_CODE (x) == TARGET_EXPR
168 || TREE_CODE (x) == BIND_EXPR))
170 x = copy_node (x);
171 SET_EXPR_LOCATION (x, loc);
173 return x;
176 /* If ARG2 divides ARG1 with zero remainder, carries out the division
177 of type CODE and returns the quotient.
178 Otherwise returns NULL_TREE. */
180 tree
181 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
183 double_int quo, rem;
184 int uns;
186 /* The sign of the division is according to operand two, that
187 does the correct thing for POINTER_PLUS_EXPR where we want
188 a signed division. */
189 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
191 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
192 uns, code, &rem);
194 if (rem.is_zero ())
195 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
197 return NULL_TREE;
200 /* This is nonzero if we should defer warnings about undefined
201 overflow. This facility exists because these warnings are a
202 special case. The code to estimate loop iterations does not want
203 to issue any warnings, since it works with expressions which do not
204 occur in user code. Various bits of cleanup code call fold(), but
205 only use the result if it has certain characteristics (e.g., is a
206 constant); that code only wants to issue a warning if the result is
207 used. */
209 static int fold_deferring_overflow_warnings;
211 /* If a warning about undefined overflow is deferred, this is the
212 warning. Note that this may cause us to turn two warnings into
213 one, but that is fine since it is sufficient to only give one
214 warning per expression. */
216 static const char* fold_deferred_overflow_warning;
218 /* If a warning about undefined overflow is deferred, this is the
219 level at which the warning should be emitted. */
221 static enum warn_strict_overflow_code fold_deferred_overflow_code;
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
226 void
227 fold_defer_overflow_warnings (void)
229 ++fold_deferring_overflow_warnings;
232 /* Stop deferring overflow warnings. If there is a pending warning,
233 and ISSUE is true, then issue the warning if appropriate. STMT is
234 the statement with which the warning should be associated (used for
235 location information); STMT may be NULL. CODE is the level of the
236 warning--a warn_strict_overflow_code value. This function will use
237 the smaller of CODE and the deferred code when deciding whether to
238 issue the warning. CODE may be zero to mean to always use the
239 deferred code. */
241 void
242 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
244 const char *warnmsg;
245 location_t locus;
247 gcc_assert (fold_deferring_overflow_warnings > 0);
248 --fold_deferring_overflow_warnings;
249 if (fold_deferring_overflow_warnings > 0)
251 if (fold_deferred_overflow_warning != NULL
252 && code != 0
253 && code < (int) fold_deferred_overflow_code)
254 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
255 return;
258 warnmsg = fold_deferred_overflow_warning;
259 fold_deferred_overflow_warning = NULL;
261 if (!issue || warnmsg == NULL)
262 return;
264 if (gimple_no_warning_p (stmt))
265 return;
267 /* Use the smallest code level when deciding to issue the
268 warning. */
269 if (code == 0 || code > (int) fold_deferred_overflow_code)
270 code = fold_deferred_overflow_code;
272 if (!issue_strict_overflow_warning (code))
273 return;
275 if (stmt == NULL)
276 locus = input_location;
277 else
278 locus = gimple_location (stmt);
279 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
282 /* Stop deferring overflow warnings, ignoring any deferred
283 warnings. */
285 void
286 fold_undefer_and_ignore_overflow_warnings (void)
288 fold_undefer_overflow_warnings (false, NULL, 0);
291 /* Whether we are deferring overflow warnings. */
293 bool
294 fold_deferring_overflow_warnings_p (void)
296 return fold_deferring_overflow_warnings > 0;
299 /* This is called when we fold something based on the fact that signed
300 overflow is undefined. */
302 static void
303 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
305 if (fold_deferring_overflow_warnings > 0)
307 if (fold_deferred_overflow_warning == NULL
308 || wc < fold_deferred_overflow_code)
310 fold_deferred_overflow_warning = gmsgid;
311 fold_deferred_overflow_code = wc;
314 else if (issue_strict_overflow_warning (wc))
315 warning (OPT_Wstrict_overflow, gmsgid);
318 /* Return true if the built-in mathematical function specified by CODE
319 is odd, i.e. -f(x) == f(-x). */
321 static bool
322 negate_mathfn_p (enum built_in_function code)
324 switch (code)
326 CASE_FLT_FN (BUILT_IN_ASIN):
327 CASE_FLT_FN (BUILT_IN_ASINH):
328 CASE_FLT_FN (BUILT_IN_ATAN):
329 CASE_FLT_FN (BUILT_IN_ATANH):
330 CASE_FLT_FN (BUILT_IN_CASIN):
331 CASE_FLT_FN (BUILT_IN_CASINH):
332 CASE_FLT_FN (BUILT_IN_CATAN):
333 CASE_FLT_FN (BUILT_IN_CATANH):
334 CASE_FLT_FN (BUILT_IN_CBRT):
335 CASE_FLT_FN (BUILT_IN_CPROJ):
336 CASE_FLT_FN (BUILT_IN_CSIN):
337 CASE_FLT_FN (BUILT_IN_CSINH):
338 CASE_FLT_FN (BUILT_IN_CTAN):
339 CASE_FLT_FN (BUILT_IN_CTANH):
340 CASE_FLT_FN (BUILT_IN_ERF):
341 CASE_FLT_FN (BUILT_IN_LLROUND):
342 CASE_FLT_FN (BUILT_IN_LROUND):
343 CASE_FLT_FN (BUILT_IN_ROUND):
344 CASE_FLT_FN (BUILT_IN_SIN):
345 CASE_FLT_FN (BUILT_IN_SINH):
346 CASE_FLT_FN (BUILT_IN_TAN):
347 CASE_FLT_FN (BUILT_IN_TANH):
348 CASE_FLT_FN (BUILT_IN_TRUNC):
349 return true;
351 CASE_FLT_FN (BUILT_IN_LLRINT):
352 CASE_FLT_FN (BUILT_IN_LRINT):
353 CASE_FLT_FN (BUILT_IN_NEARBYINT):
354 CASE_FLT_FN (BUILT_IN_RINT):
355 return !flag_rounding_math;
357 default:
358 break;
360 return false;
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
366 bool
367 may_negate_without_overflow_p (const_tree t)
369 unsigned HOST_WIDE_INT val;
370 unsigned int prec;
371 tree type;
373 gcc_assert (TREE_CODE (t) == INTEGER_CST);
375 type = TREE_TYPE (t);
376 if (TYPE_UNSIGNED (type))
377 return false;
379 prec = TYPE_PRECISION (type);
380 if (prec > HOST_BITS_PER_WIDE_INT)
382 if (TREE_INT_CST_LOW (t) != 0)
383 return true;
384 prec -= HOST_BITS_PER_WIDE_INT;
385 val = TREE_INT_CST_HIGH (t);
387 else
388 val = TREE_INT_CST_LOW (t);
389 if (prec < HOST_BITS_PER_WIDE_INT)
390 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
391 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
394 /* Determine whether an expression T can be cheaply negated using
395 the function negate_expr without introducing undefined overflow. */
397 static bool
398 negate_expr_p (tree t)
400 tree type;
402 if (t == 0)
403 return false;
405 type = TREE_TYPE (t);
407 STRIP_SIGN_NOPS (t);
408 switch (TREE_CODE (t))
410 case INTEGER_CST:
411 if (TYPE_OVERFLOW_WRAPS (type))
412 return true;
414 /* Check that -CST will not overflow type. */
415 return may_negate_without_overflow_p (t);
416 case BIT_NOT_EXPR:
417 return (INTEGRAL_TYPE_P (type)
418 && TYPE_OVERFLOW_WRAPS (type));
420 case FIXED_CST:
421 case NEGATE_EXPR:
422 return true;
424 case REAL_CST:
425 /* We want to canonicalize to positive real constants. Pretend
426 that only negative ones can be easily negated. */
427 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
429 case COMPLEX_CST:
430 return negate_expr_p (TREE_REALPART (t))
431 && negate_expr_p (TREE_IMAGPART (t));
433 case VECTOR_CST:
435 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
436 return true;
438 int count = TYPE_VECTOR_SUBPARTS (type), i;
440 for (i = 0; i < count; i++)
441 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
442 return false;
444 return true;
447 case COMPLEX_EXPR:
448 return negate_expr_p (TREE_OPERAND (t, 0))
449 && negate_expr_p (TREE_OPERAND (t, 1));
451 case CONJ_EXPR:
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case PLUS_EXPR:
455 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
456 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
457 return false;
458 /* -(A + B) -> (-B) - A. */
459 if (negate_expr_p (TREE_OPERAND (t, 1))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1)))
462 return true;
463 /* -(A + B) -> (-A) - B. */
464 return negate_expr_p (TREE_OPERAND (t, 0));
466 case MINUS_EXPR:
467 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
468 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
469 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1));
473 case MULT_EXPR:
474 if (TYPE_UNSIGNED (TREE_TYPE (t)))
475 break;
477 /* Fall through. */
479 case RDIV_EXPR:
480 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
481 return negate_expr_p (TREE_OPERAND (t, 1))
482 || negate_expr_p (TREE_OPERAND (t, 0));
483 break;
485 case TRUNC_DIV_EXPR:
486 case ROUND_DIV_EXPR:
487 case EXACT_DIV_EXPR:
488 /* In general we can't negate A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. But if overflow is
491 undefined, we can negate, because - (INT_MIN / 1) is an
492 overflow. */
493 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
495 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
496 break;
497 /* If overflow is undefined then we have to be careful because
498 we ask whether it's ok to associate the negate with the
499 division which is not ok for example for
500 -((a - b) / c) where (-(a - b)) / c may invoke undefined
501 overflow because of negating INT_MIN. So do not use
502 negate_expr_p here but open-code the two important cases. */
503 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
504 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
505 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
506 return true;
508 else if (negate_expr_p (TREE_OPERAND (t, 0)))
509 return true;
510 return negate_expr_p (TREE_OPERAND (t, 1));
512 case NOP_EXPR:
513 /* Negate -((double)float) as (double)(-float). */
514 if (TREE_CODE (type) == REAL_TYPE)
516 tree tem = strip_float_extensions (t);
517 if (tem != t)
518 return negate_expr_p (tem);
520 break;
522 case CALL_EXPR:
523 /* Negate -f(x) as f(-x). */
524 if (negate_mathfn_p (builtin_mathfn_code (t)))
525 return negate_expr_p (CALL_EXPR_ARG (t, 0));
526 break;
528 case RSHIFT_EXPR:
529 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
530 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
532 tree op1 = TREE_OPERAND (t, 1);
533 if (TREE_INT_CST_HIGH (op1) == 0
534 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
535 == TREE_INT_CST_LOW (op1))
536 return true;
538 break;
540 default:
541 break;
543 return false;
546 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
547 simplification is possible.
548 If negate_expr_p would return true for T, NULL_TREE will never be
549 returned. */
551 static tree
552 fold_negate_expr (location_t loc, tree t)
554 tree type = TREE_TYPE (t);
555 tree tem;
557 switch (TREE_CODE (t))
559 /* Convert - (~A) to A + 1. */
560 case BIT_NOT_EXPR:
561 if (INTEGRAL_TYPE_P (type))
562 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
563 build_one_cst (type));
564 break;
566 case INTEGER_CST:
567 tem = fold_negate_const (t, type);
568 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
569 || !TYPE_OVERFLOW_TRAPS (type))
570 return tem;
571 break;
573 case REAL_CST:
574 tem = fold_negate_const (t, type);
575 /* Two's complement FP formats, such as c4x, may overflow. */
576 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
577 return tem;
578 break;
580 case FIXED_CST:
581 tem = fold_negate_const (t, type);
582 return tem;
584 case COMPLEX_CST:
586 tree rpart = negate_expr (TREE_REALPART (t));
587 tree ipart = negate_expr (TREE_IMAGPART (t));
589 if ((TREE_CODE (rpart) == REAL_CST
590 && TREE_CODE (ipart) == REAL_CST)
591 || (TREE_CODE (rpart) == INTEGER_CST
592 && TREE_CODE (ipart) == INTEGER_CST))
593 return build_complex (type, rpart, ipart);
595 break;
597 case VECTOR_CST:
599 int count = TYPE_VECTOR_SUBPARTS (type), i;
600 tree *elts = XALLOCAVEC (tree, count);
602 for (i = 0; i < count; i++)
604 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
605 if (elts[i] == NULL_TREE)
606 return NULL_TREE;
609 return build_vector (type, elts);
612 case COMPLEX_EXPR:
613 if (negate_expr_p (t))
614 return fold_build2_loc (loc, COMPLEX_EXPR, type,
615 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
616 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
617 break;
619 case CONJ_EXPR:
620 if (negate_expr_p (t))
621 return fold_build1_loc (loc, CONJ_EXPR, type,
622 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
623 break;
625 case NEGATE_EXPR:
626 return TREE_OPERAND (t, 0);
628 case PLUS_EXPR:
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
630 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
632 /* -(A + B) -> (-B) - A. */
633 if (negate_expr_p (TREE_OPERAND (t, 1))
634 && reorder_operands_p (TREE_OPERAND (t, 0),
635 TREE_OPERAND (t, 1)))
637 tem = negate_expr (TREE_OPERAND (t, 1));
638 return fold_build2_loc (loc, MINUS_EXPR, type,
639 tem, TREE_OPERAND (t, 0));
642 /* -(A + B) -> (-A) - B. */
643 if (negate_expr_p (TREE_OPERAND (t, 0)))
645 tem = negate_expr (TREE_OPERAND (t, 0));
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 tem, TREE_OPERAND (t, 1));
650 break;
652 case MINUS_EXPR:
653 /* - (A - B) -> B - A */
654 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
655 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
656 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
657 return fold_build2_loc (loc, MINUS_EXPR, type,
658 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
659 break;
661 case MULT_EXPR:
662 if (TYPE_UNSIGNED (type))
663 break;
665 /* Fall through. */
667 case RDIV_EXPR:
668 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
670 tem = TREE_OPERAND (t, 1);
671 if (negate_expr_p (tem))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 TREE_OPERAND (t, 0), negate_expr (tem));
674 tem = TREE_OPERAND (t, 0);
675 if (negate_expr_p (tem))
676 return fold_build2_loc (loc, TREE_CODE (t), type,
677 negate_expr (tem), TREE_OPERAND (t, 1));
679 break;
681 case TRUNC_DIV_EXPR:
682 case ROUND_DIV_EXPR:
683 case EXACT_DIV_EXPR:
684 /* In general we can't negate A / B, because if A is INT_MIN and
685 B is 1, we may turn this into INT_MIN / -1 which is undefined
686 and actually traps on some architectures. But if overflow is
687 undefined, we can negate, because - (INT_MIN / 1) is an
688 overflow. */
689 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
691 const char * const warnmsg = G_("assuming signed overflow does not "
692 "occur when negating a division");
693 tem = TREE_OPERAND (t, 1);
694 if (negate_expr_p (tem))
696 if (INTEGRAL_TYPE_P (type)
697 && (TREE_CODE (tem) != INTEGER_CST
698 || integer_onep (tem)))
699 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
700 return fold_build2_loc (loc, TREE_CODE (t), type,
701 TREE_OPERAND (t, 0), negate_expr (tem));
703 /* If overflow is undefined then we have to be careful because
704 we ask whether it's ok to associate the negate with the
705 division which is not ok for example for
706 -((a - b) / c) where (-(a - b)) / c may invoke undefined
707 overflow because of negating INT_MIN. So do not use
708 negate_expr_p here but open-code the two important cases. */
709 tem = TREE_OPERAND (t, 0);
710 if ((INTEGRAL_TYPE_P (type)
711 && (TREE_CODE (tem) == NEGATE_EXPR
712 || (TREE_CODE (tem) == INTEGER_CST
713 && may_negate_without_overflow_p (tem))))
714 || !INTEGRAL_TYPE_P (type))
715 return fold_build2_loc (loc, TREE_CODE (t), type,
716 negate_expr (tem), TREE_OPERAND (t, 1));
718 break;
720 case NOP_EXPR:
721 /* Convert -((double)float) into (double)(-float). */
722 if (TREE_CODE (type) == REAL_TYPE)
724 tem = strip_float_extensions (t);
725 if (tem != t && negate_expr_p (tem))
726 return fold_convert_loc (loc, type, negate_expr (tem));
728 break;
730 case CALL_EXPR:
731 /* Negate -f(x) as f(-x). */
732 if (negate_mathfn_p (builtin_mathfn_code (t))
733 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
735 tree fndecl, arg;
737 fndecl = get_callee_fndecl (t);
738 arg = negate_expr (CALL_EXPR_ARG (t, 0));
739 return build_call_expr_loc (loc, fndecl, 1, arg);
741 break;
743 case RSHIFT_EXPR:
744 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
745 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
747 tree op1 = TREE_OPERAND (t, 1);
748 if (TREE_INT_CST_HIGH (op1) == 0
749 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
750 == TREE_INT_CST_LOW (op1))
752 tree ntype = TYPE_UNSIGNED (type)
753 ? signed_type_for (type)
754 : unsigned_type_for (type);
755 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
756 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
757 return fold_convert_loc (loc, type, temp);
760 break;
762 default:
763 break;
766 return NULL_TREE;
769 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
770 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
771 return NULL_TREE. */
773 static tree
774 negate_expr (tree t)
776 tree type, tem;
777 location_t loc;
779 if (t == NULL_TREE)
780 return NULL_TREE;
782 loc = EXPR_LOCATION (t);
783 type = TREE_TYPE (t);
784 STRIP_SIGN_NOPS (t);
786 tem = fold_negate_expr (loc, t);
787 if (!tem)
788 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
789 return fold_convert_loc (loc, type, tem);
792 /* Split a tree IN into a constant, literal and variable parts that could be
793 combined with CODE to make IN. "constant" means an expression with
794 TREE_CONSTANT but that isn't an actual constant. CODE must be a
795 commutative arithmetic operation. Store the constant part into *CONP,
796 the literal in *LITP and return the variable part. If a part isn't
797 present, set it to null. If the tree does not decompose in this way,
798 return the entire tree as the variable part and the other parts as null.
800 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
801 case, we negate an operand that was subtracted. Except if it is a
802 literal for which we use *MINUS_LITP instead.
804 If NEGATE_P is true, we are negating all of IN, again except a literal
805 for which we use *MINUS_LITP instead.
807 If IN is itself a literal or constant, return it as appropriate.
809 Note that we do not guarantee that any of the three values will be the
810 same type as IN, but they will have the same signedness and mode. */
812 static tree
813 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
814 tree *minus_litp, int negate_p)
816 tree var = 0;
818 *conp = 0;
819 *litp = 0;
820 *minus_litp = 0;
822 /* Strip any conversions that don't change the machine mode or signedness. */
823 STRIP_SIGN_NOPS (in);
825 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
826 || TREE_CODE (in) == FIXED_CST)
827 *litp = in;
828 else if (TREE_CODE (in) == code
829 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
830 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
831 /* We can associate addition and subtraction together (even
832 though the C standard doesn't say so) for integers because
833 the value is not affected. For reals, the value might be
834 affected, so we can't. */
835 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
836 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
838 tree op0 = TREE_OPERAND (in, 0);
839 tree op1 = TREE_OPERAND (in, 1);
840 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
841 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
843 /* First see if either of the operands is a literal, then a constant. */
844 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
845 || TREE_CODE (op0) == FIXED_CST)
846 *litp = op0, op0 = 0;
847 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
848 || TREE_CODE (op1) == FIXED_CST)
849 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
851 if (op0 != 0 && TREE_CONSTANT (op0))
852 *conp = op0, op0 = 0;
853 else if (op1 != 0 && TREE_CONSTANT (op1))
854 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
856 /* If we haven't dealt with either operand, this is not a case we can
857 decompose. Otherwise, VAR is either of the ones remaining, if any. */
858 if (op0 != 0 && op1 != 0)
859 var = in;
860 else if (op0 != 0)
861 var = op0;
862 else
863 var = op1, neg_var_p = neg1_p;
865 /* Now do any needed negations. */
866 if (neg_litp_p)
867 *minus_litp = *litp, *litp = 0;
868 if (neg_conp_p)
869 *conp = negate_expr (*conp);
870 if (neg_var_p)
871 var = negate_expr (var);
873 else if (TREE_CODE (in) == BIT_NOT_EXPR
874 && code == PLUS_EXPR)
876 /* -X - 1 is folded to ~X, undo that here. */
877 *minus_litp = build_one_cst (TREE_TYPE (in));
878 var = negate_expr (TREE_OPERAND (in, 0));
880 else if (TREE_CONSTANT (in))
881 *conp = in;
882 else
883 var = in;
885 if (negate_p)
887 if (*litp)
888 *minus_litp = *litp, *litp = 0;
889 else if (*minus_litp)
890 *litp = *minus_litp, *minus_litp = 0;
891 *conp = negate_expr (*conp);
892 var = negate_expr (var);
895 return var;
898 /* Re-associate trees split by the above function. T1 and T2 are
899 either expressions to associate or null. Return the new
900 expression, if any. LOC is the location of the new expression. If
901 we build an operation, do it in TYPE and with CODE. */
903 static tree
904 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
906 if (t1 == 0)
907 return t2;
908 else if (t2 == 0)
909 return t1;
911 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
912 try to fold this since we will have infinite recursion. But do
913 deal with any NEGATE_EXPRs. */
914 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
915 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
917 if (code == PLUS_EXPR)
919 if (TREE_CODE (t1) == NEGATE_EXPR)
920 return build2_loc (loc, MINUS_EXPR, type,
921 fold_convert_loc (loc, type, t2),
922 fold_convert_loc (loc, type,
923 TREE_OPERAND (t1, 0)));
924 else if (TREE_CODE (t2) == NEGATE_EXPR)
925 return build2_loc (loc, MINUS_EXPR, type,
926 fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type,
928 TREE_OPERAND (t2, 0)));
929 else if (integer_zerop (t2))
930 return fold_convert_loc (loc, type, t1);
932 else if (code == MINUS_EXPR)
934 if (integer_zerop (t2))
935 return fold_convert_loc (loc, type, t1);
938 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
942 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
946 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
947 for use in int_const_binop, size_binop and size_diffop. */
949 static bool
950 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
952 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
953 return false;
954 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
955 return false;
957 switch (code)
959 case LSHIFT_EXPR:
960 case RSHIFT_EXPR:
961 case LROTATE_EXPR:
962 case RROTATE_EXPR:
963 return true;
965 default:
966 break;
969 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
970 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
971 && TYPE_MODE (type1) == TYPE_MODE (type2);
975 /* Combine two integer constants ARG1 and ARG2 under operation CODE
976 to produce a new constant. Return NULL_TREE if we don't know how
977 to evaluate CODE at compile-time. */
979 static tree
980 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
981 int overflowable)
983 double_int op1, op2, res, tmp;
984 tree t;
985 tree type = TREE_TYPE (arg1);
986 bool uns = TYPE_UNSIGNED (type);
987 bool overflow = false;
989 op1 = tree_to_double_int (arg1);
990 op2 = tree_to_double_int (arg2);
992 switch (code)
994 case BIT_IOR_EXPR:
995 res = op1 | op2;
996 break;
998 case BIT_XOR_EXPR:
999 res = op1 ^ op2;
1000 break;
1002 case BIT_AND_EXPR:
1003 res = op1 & op2;
1004 break;
1006 case RSHIFT_EXPR:
1007 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1008 break;
1010 case LSHIFT_EXPR:
1011 /* It's unclear from the C standard whether shifts can overflow.
1012 The following code ignores overflow; perhaps a C standard
1013 interpretation ruling is needed. */
1014 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1015 break;
1017 case RROTATE_EXPR:
1018 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1019 break;
1021 case LROTATE_EXPR:
1022 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1023 break;
1025 case PLUS_EXPR:
1026 res = op1.add_with_sign (op2, false, &overflow);
1027 break;
1029 case MINUS_EXPR:
1030 res = op1.sub_with_overflow (op2, &overflow);
1031 break;
1033 case MULT_EXPR:
1034 res = op1.mul_with_sign (op2, false, &overflow);
1035 break;
1037 case MULT_HIGHPART_EXPR:
1038 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1040 bool dummy_overflow;
1041 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1042 return NULL_TREE;
1043 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1045 else
1047 bool dummy_overflow;
1048 /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
1049 is performed in twice the precision of arguments. */
1050 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1051 res = tmp.rshift (TYPE_PRECISION (type),
1052 2 * TYPE_PRECISION (type), !uns);
1054 break;
1056 case TRUNC_DIV_EXPR:
1057 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1058 case EXACT_DIV_EXPR:
1059 /* This is a shortcut for a common special case. */
1060 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1061 && !TREE_OVERFLOW (arg1)
1062 && !TREE_OVERFLOW (arg2)
1063 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1065 if (code == CEIL_DIV_EXPR)
1066 op1.low += op2.low - 1;
1068 res.low = op1.low / op2.low, res.high = 0;
1069 break;
1072 /* ... fall through ... */
1074 case ROUND_DIV_EXPR:
1075 if (op2.is_zero ())
1076 return NULL_TREE;
1077 if (op2.is_one ())
1079 res = op1;
1080 break;
1082 if (op1 == op2 && !op1.is_zero ())
1084 res = double_int_one;
1085 break;
1087 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1088 break;
1090 case TRUNC_MOD_EXPR:
1091 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1092 /* This is a shortcut for a common special case. */
1093 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1094 && !TREE_OVERFLOW (arg1)
1095 && !TREE_OVERFLOW (arg2)
1096 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1098 if (code == CEIL_MOD_EXPR)
1099 op1.low += op2.low - 1;
1100 res.low = op1.low % op2.low, res.high = 0;
1101 break;
1104 /* ... fall through ... */
1106 case ROUND_MOD_EXPR:
1107 if (op2.is_zero ())
1108 return NULL_TREE;
1110 /* Check for the case the case of INT_MIN % -1 and return
1111 overflow and result = 0. The TImode case is handled properly
1112 in double-int. */
1113 if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
1114 && !uns
1115 && op2.is_minus_one ()
1116 && op1.high == (HOST_WIDE_INT) -1
1117 && (HOST_WIDE_INT) op1.low
1118 == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
1120 overflow = 1;
1121 res = double_int_zero;
1123 else
1124 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1125 break;
1127 case MIN_EXPR:
1128 res = op1.min (op2, uns);
1129 break;
1131 case MAX_EXPR:
1132 res = op1.max (op2, uns);
1133 break;
1135 default:
1136 return NULL_TREE;
1139 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1140 (!uns && overflow)
1141 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1143 return t;
1146 tree
1147 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1149 return int_const_binop_1 (code, arg1, arg2, 1);
1152 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1153 constant. We assume ARG1 and ARG2 have the same data type, or at least
1154 are the same kind of constant and the same machine mode. Return zero if
1155 combining the constants is not allowed in the current operating mode. */
1157 static tree
1158 const_binop (enum tree_code code, tree arg1, tree arg2)
1160 /* Sanity check for the recursive cases. */
1161 if (!arg1 || !arg2)
1162 return NULL_TREE;
1164 STRIP_NOPS (arg1);
1165 STRIP_NOPS (arg2);
1167 if (TREE_CODE (arg1) == INTEGER_CST)
1168 return int_const_binop (code, arg1, arg2);
1170 if (TREE_CODE (arg1) == REAL_CST)
1172 enum machine_mode mode;
1173 REAL_VALUE_TYPE d1;
1174 REAL_VALUE_TYPE d2;
1175 REAL_VALUE_TYPE value;
1176 REAL_VALUE_TYPE result;
1177 bool inexact;
1178 tree t, type;
1180 /* The following codes are handled by real_arithmetic. */
1181 switch (code)
1183 case PLUS_EXPR:
1184 case MINUS_EXPR:
1185 case MULT_EXPR:
1186 case RDIV_EXPR:
1187 case MIN_EXPR:
1188 case MAX_EXPR:
1189 break;
1191 default:
1192 return NULL_TREE;
1195 d1 = TREE_REAL_CST (arg1);
1196 d2 = TREE_REAL_CST (arg2);
1198 type = TREE_TYPE (arg1);
1199 mode = TYPE_MODE (type);
1201 /* Don't perform operation if we honor signaling NaNs and
1202 either operand is a NaN. */
1203 if (HONOR_SNANS (mode)
1204 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1205 return NULL_TREE;
1207 /* Don't perform operation if it would raise a division
1208 by zero exception. */
1209 if (code == RDIV_EXPR
1210 && REAL_VALUES_EQUAL (d2, dconst0)
1211 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1212 return NULL_TREE;
1214 /* If either operand is a NaN, just return it. Otherwise, set up
1215 for floating-point trap; we return an overflow. */
1216 if (REAL_VALUE_ISNAN (d1))
1217 return arg1;
1218 else if (REAL_VALUE_ISNAN (d2))
1219 return arg2;
1221 inexact = real_arithmetic (&value, code, &d1, &d2);
1222 real_convert (&result, mode, &value);
1224 /* Don't constant fold this floating point operation if
1225 the result has overflowed and flag_trapping_math. */
1226 if (flag_trapping_math
1227 && MODE_HAS_INFINITIES (mode)
1228 && REAL_VALUE_ISINF (result)
1229 && !REAL_VALUE_ISINF (d1)
1230 && !REAL_VALUE_ISINF (d2))
1231 return NULL_TREE;
1233 /* Don't constant fold this floating point operation if the
1234 result may dependent upon the run-time rounding mode and
1235 flag_rounding_math is set, or if GCC's software emulation
1236 is unable to accurately represent the result. */
1237 if ((flag_rounding_math
1238 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1239 && (inexact || !real_identical (&result, &value)))
1240 return NULL_TREE;
1242 t = build_real (type, result);
1244 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1245 return t;
1248 if (TREE_CODE (arg1) == FIXED_CST)
1250 FIXED_VALUE_TYPE f1;
1251 FIXED_VALUE_TYPE f2;
1252 FIXED_VALUE_TYPE result;
1253 tree t, type;
1254 int sat_p;
1255 bool overflow_p;
1257 /* The following codes are handled by fixed_arithmetic. */
1258 switch (code)
1260 case PLUS_EXPR:
1261 case MINUS_EXPR:
1262 case MULT_EXPR:
1263 case TRUNC_DIV_EXPR:
1264 f2 = TREE_FIXED_CST (arg2);
1265 break;
1267 case LSHIFT_EXPR:
1268 case RSHIFT_EXPR:
1269 f2.data.high = TREE_INT_CST_HIGH (arg2);
1270 f2.data.low = TREE_INT_CST_LOW (arg2);
1271 f2.mode = SImode;
1272 break;
1274 default:
1275 return NULL_TREE;
1278 f1 = TREE_FIXED_CST (arg1);
1279 type = TREE_TYPE (arg1);
1280 sat_p = TYPE_SATURATING (type);
1281 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1282 t = build_fixed (type, result);
1283 /* Propagate overflow flags. */
1284 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1285 TREE_OVERFLOW (t) = 1;
1286 return t;
1289 if (TREE_CODE (arg1) == COMPLEX_CST)
1291 tree type = TREE_TYPE (arg1);
1292 tree r1 = TREE_REALPART (arg1);
1293 tree i1 = TREE_IMAGPART (arg1);
1294 tree r2 = TREE_REALPART (arg2);
1295 tree i2 = TREE_IMAGPART (arg2);
1296 tree real, imag;
1298 switch (code)
1300 case PLUS_EXPR:
1301 case MINUS_EXPR:
1302 real = const_binop (code, r1, r2);
1303 imag = const_binop (code, i1, i2);
1304 break;
1306 case MULT_EXPR:
1307 if (COMPLEX_FLOAT_TYPE_P (type))
1308 return do_mpc_arg2 (arg1, arg2, type,
1309 /* do_nonfinite= */ folding_initializer,
1310 mpc_mul);
1312 real = const_binop (MINUS_EXPR,
1313 const_binop (MULT_EXPR, r1, r2),
1314 const_binop (MULT_EXPR, i1, i2));
1315 imag = const_binop (PLUS_EXPR,
1316 const_binop (MULT_EXPR, r1, i2),
1317 const_binop (MULT_EXPR, i1, r2));
1318 break;
1320 case RDIV_EXPR:
1321 if (COMPLEX_FLOAT_TYPE_P (type))
1322 return do_mpc_arg2 (arg1, arg2, type,
1323 /* do_nonfinite= */ folding_initializer,
1324 mpc_div);
1325 /* Fallthru ... */
1326 case TRUNC_DIV_EXPR:
1327 case CEIL_DIV_EXPR:
1328 case FLOOR_DIV_EXPR:
1329 case ROUND_DIV_EXPR:
1330 if (flag_complex_method == 0)
1332 /* Keep this algorithm in sync with
1333 tree-complex.c:expand_complex_div_straight().
1335 Expand complex division to scalars, straightforward algorithm.
1336 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1337 t = br*br + bi*bi
1339 tree magsquared
1340 = const_binop (PLUS_EXPR,
1341 const_binop (MULT_EXPR, r2, r2),
1342 const_binop (MULT_EXPR, i2, i2));
1343 tree t1
1344 = const_binop (PLUS_EXPR,
1345 const_binop (MULT_EXPR, r1, r2),
1346 const_binop (MULT_EXPR, i1, i2));
1347 tree t2
1348 = const_binop (MINUS_EXPR,
1349 const_binop (MULT_EXPR, i1, r2),
1350 const_binop (MULT_EXPR, r1, i2));
1352 real = const_binop (code, t1, magsquared);
1353 imag = const_binop (code, t2, magsquared);
1355 else
1357 /* Keep this algorithm in sync with
1358 tree-complex.c:expand_complex_div_wide().
1360 Expand complex division to scalars, modified algorithm to minimize
1361 overflow with wide input ranges. */
1362 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1363 fold_abs_const (r2, TREE_TYPE (type)),
1364 fold_abs_const (i2, TREE_TYPE (type)));
1366 if (integer_nonzerop (compare))
1368 /* In the TRUE branch, we compute
1369 ratio = br/bi;
1370 div = (br * ratio) + bi;
1371 tr = (ar * ratio) + ai;
1372 ti = (ai * ratio) - ar;
1373 tr = tr / div;
1374 ti = ti / div; */
1375 tree ratio = const_binop (code, r2, i2);
1376 tree div = const_binop (PLUS_EXPR, i2,
1377 const_binop (MULT_EXPR, r2, ratio));
1378 real = const_binop (MULT_EXPR, r1, ratio);
1379 real = const_binop (PLUS_EXPR, real, i1);
1380 real = const_binop (code, real, div);
1382 imag = const_binop (MULT_EXPR, i1, ratio);
1383 imag = const_binop (MINUS_EXPR, imag, r1);
1384 imag = const_binop (code, imag, div);
1386 else
1388 /* In the FALSE branch, we compute
1389 ratio = d/c;
1390 divisor = (d * ratio) + c;
1391 tr = (b * ratio) + a;
1392 ti = b - (a * ratio);
1393 tr = tr / div;
1394 ti = ti / div; */
1395 tree ratio = const_binop (code, i2, r2);
1396 tree div = const_binop (PLUS_EXPR, r2,
1397 const_binop (MULT_EXPR, i2, ratio));
1399 real = const_binop (MULT_EXPR, i1, ratio);
1400 real = const_binop (PLUS_EXPR, real, r1);
1401 real = const_binop (code, real, div);
1403 imag = const_binop (MULT_EXPR, r1, ratio);
1404 imag = const_binop (MINUS_EXPR, i1, imag);
1405 imag = const_binop (code, imag, div);
1408 break;
1410 default:
1411 return NULL_TREE;
1414 if (real && imag)
1415 return build_complex (type, real, imag);
1418 if (TREE_CODE (arg1) == VECTOR_CST
1419 && TREE_CODE (arg2) == VECTOR_CST)
1421 tree type = TREE_TYPE (arg1);
1422 int count = TYPE_VECTOR_SUBPARTS (type), i;
1423 tree *elts = XALLOCAVEC (tree, count);
1425 for (i = 0; i < count; i++)
1427 tree elem1 = VECTOR_CST_ELT (arg1, i);
1428 tree elem2 = VECTOR_CST_ELT (arg2, i);
1430 elts[i] = const_binop (code, elem1, elem2);
1432 /* It is possible that const_binop cannot handle the given
1433 code and return NULL_TREE */
1434 if (elts[i] == NULL_TREE)
1435 return NULL_TREE;
1438 return build_vector (type, elts);
1441 /* Shifts allow a scalar offset for a vector. */
1442 if (TREE_CODE (arg1) == VECTOR_CST
1443 && TREE_CODE (arg2) == INTEGER_CST)
1445 tree type = TREE_TYPE (arg1);
1446 int count = TYPE_VECTOR_SUBPARTS (type), i;
1447 tree *elts = XALLOCAVEC (tree, count);
1449 if (code == VEC_LSHIFT_EXPR
1450 || code == VEC_RSHIFT_EXPR)
1452 if (!tree_fits_uhwi_p (arg2))
1453 return NULL_TREE;
1455 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1456 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1457 unsigned HOST_WIDE_INT innerc
1458 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1459 if (shiftc >= outerc || (shiftc % innerc) != 0)
1460 return NULL_TREE;
1461 int offset = shiftc / innerc;
1462 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1463 For reductions, compiler emits VEC_RSHIFT_EXPR always,
1464 for !BYTES_BIG_ENDIAN picks first vector element, but
1465 for BYTES_BIG_ENDIAN last element from the vector. */
1466 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1467 offset = -offset;
1468 tree zero = build_zero_cst (TREE_TYPE (type));
1469 for (i = 0; i < count; i++)
1471 if (i + offset < 0 || i + offset >= count)
1472 elts[i] = zero;
1473 else
1474 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1477 else
1478 for (i = 0; i < count; i++)
1480 tree elem1 = VECTOR_CST_ELT (arg1, i);
1482 elts[i] = const_binop (code, elem1, arg2);
1484 /* It is possible that const_binop cannot handle the given
1485 code and return NULL_TREE */
1486 if (elts[i] == NULL_TREE)
1487 return NULL_TREE;
1490 return build_vector (type, elts);
1492 return NULL_TREE;
1495 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1496 indicates which particular sizetype to create. */
1498 tree
1499 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1501 return build_int_cst (sizetype_tab[(int) kind], number);
1504 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1505 is a tree code. The type of the result is taken from the operands.
1506 Both must be equivalent integer types, ala int_binop_types_match_p.
1507 If the operands are constant, so is the result. */
1509 tree
1510 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1512 tree type = TREE_TYPE (arg0);
1514 if (arg0 == error_mark_node || arg1 == error_mark_node)
1515 return error_mark_node;
1517 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1518 TREE_TYPE (arg1)));
1520 /* Handle the special case of two integer constants faster. */
1521 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1523 /* And some specific cases even faster than that. */
1524 if (code == PLUS_EXPR)
1526 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1527 return arg1;
1528 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1529 return arg0;
1531 else if (code == MINUS_EXPR)
1533 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1534 return arg0;
1536 else if (code == MULT_EXPR)
1538 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1539 return arg1;
1542 /* Handle general case of two integer constants. For sizetype
1543 constant calculations we always want to know about overflow,
1544 even in the unsigned case. */
1545 return int_const_binop_1 (code, arg0, arg1, -1);
1548 return fold_build2_loc (loc, code, type, arg0, arg1);
1551 /* Given two values, either both of sizetype or both of bitsizetype,
1552 compute the difference between the two values. Return the value
1553 in signed type corresponding to the type of the operands. */
1555 tree
1556 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1558 tree type = TREE_TYPE (arg0);
1559 tree ctype;
1561 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1562 TREE_TYPE (arg1)));
1564 /* If the type is already signed, just do the simple thing. */
1565 if (!TYPE_UNSIGNED (type))
1566 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1568 if (type == sizetype)
1569 ctype = ssizetype;
1570 else if (type == bitsizetype)
1571 ctype = sbitsizetype;
1572 else
1573 ctype = signed_type_for (type);
1575 /* If either operand is not a constant, do the conversions to the signed
1576 type and subtract. The hardware will do the right thing with any
1577 overflow in the subtraction. */
1578 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1579 return size_binop_loc (loc, MINUS_EXPR,
1580 fold_convert_loc (loc, ctype, arg0),
1581 fold_convert_loc (loc, ctype, arg1));
1583 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1584 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1585 overflow) and negate (which can't either). Special-case a result
1586 of zero while we're here. */
1587 if (tree_int_cst_equal (arg0, arg1))
1588 return build_int_cst (ctype, 0);
1589 else if (tree_int_cst_lt (arg1, arg0))
1590 return fold_convert_loc (loc, ctype,
1591 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1592 else
1593 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1594 fold_convert_loc (loc, ctype,
1595 size_binop_loc (loc,
1596 MINUS_EXPR,
1597 arg1, arg0)));
1600 /* A subroutine of fold_convert_const handling conversions of an
1601 INTEGER_CST to another integer type. */
1603 static tree
1604 fold_convert_const_int_from_int (tree type, const_tree arg1)
1606 tree t;
1608 /* Given an integer constant, make new constant with new type,
1609 appropriately sign-extended or truncated. */
1610 t = force_fit_type_double (type, tree_to_double_int (arg1),
1611 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1612 (TREE_INT_CST_HIGH (arg1) < 0
1613 && (TYPE_UNSIGNED (type)
1614 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1615 | TREE_OVERFLOW (arg1));
1617 return t;
1620 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1621 to an integer type. */
1623 static tree
1624 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1626 int overflow = 0;
1627 tree t;
1629 /* The following code implements the floating point to integer
1630 conversion rules required by the Java Language Specification,
1631 that IEEE NaNs are mapped to zero and values that overflow
1632 the target precision saturate, i.e. values greater than
1633 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1634 are mapped to INT_MIN. These semantics are allowed by the
1635 C and C++ standards that simply state that the behavior of
1636 FP-to-integer conversion is unspecified upon overflow. */
1638 double_int val;
1639 REAL_VALUE_TYPE r;
1640 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1642 switch (code)
1644 case FIX_TRUNC_EXPR:
1645 real_trunc (&r, VOIDmode, &x);
1646 break;
1648 default:
1649 gcc_unreachable ();
1652 /* If R is NaN, return zero and show we have an overflow. */
1653 if (REAL_VALUE_ISNAN (r))
1655 overflow = 1;
1656 val = double_int_zero;
1659 /* See if R is less than the lower bound or greater than the
1660 upper bound. */
1662 if (! overflow)
1664 tree lt = TYPE_MIN_VALUE (type);
1665 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1666 if (REAL_VALUES_LESS (r, l))
1668 overflow = 1;
1669 val = tree_to_double_int (lt);
1673 if (! overflow)
1675 tree ut = TYPE_MAX_VALUE (type);
1676 if (ut)
1678 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1679 if (REAL_VALUES_LESS (u, r))
1681 overflow = 1;
1682 val = tree_to_double_int (ut);
1687 if (! overflow)
1688 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1690 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1691 return t;
1694 /* A subroutine of fold_convert_const handling conversions of a
1695 FIXED_CST to an integer type. */
1697 static tree
1698 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1700 tree t;
1701 double_int temp, temp_trunc;
1702 unsigned int mode;
1704 /* Right shift FIXED_CST to temp by fbit. */
1705 temp = TREE_FIXED_CST (arg1).data;
1706 mode = TREE_FIXED_CST (arg1).mode;
1707 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1709 temp = temp.rshift (GET_MODE_FBIT (mode),
1710 HOST_BITS_PER_DOUBLE_INT,
1711 SIGNED_FIXED_POINT_MODE_P (mode));
1713 /* Left shift temp to temp_trunc by fbit. */
1714 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1715 HOST_BITS_PER_DOUBLE_INT,
1716 SIGNED_FIXED_POINT_MODE_P (mode));
1718 else
1720 temp = double_int_zero;
1721 temp_trunc = double_int_zero;
1724 /* If FIXED_CST is negative, we need to round the value toward 0.
1725 By checking if the fractional bits are not zero to add 1 to temp. */
1726 if (SIGNED_FIXED_POINT_MODE_P (mode)
1727 && temp_trunc.is_negative ()
1728 && TREE_FIXED_CST (arg1).data != temp_trunc)
1729 temp += double_int_one;
1731 /* Given a fixed-point constant, make new constant with new type,
1732 appropriately sign-extended or truncated. */
1733 t = force_fit_type_double (type, temp, -1,
1734 (temp.is_negative ()
1735 && (TYPE_UNSIGNED (type)
1736 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1737 | TREE_OVERFLOW (arg1));
1739 return t;
1742 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1743 to another floating point type. */
1745 static tree
1746 fold_convert_const_real_from_real (tree type, const_tree arg1)
1748 REAL_VALUE_TYPE value;
1749 tree t;
1751 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1752 t = build_real (type, value);
1754 /* If converting an infinity or NAN to a representation that doesn't
1755 have one, set the overflow bit so that we can produce some kind of
1756 error message at the appropriate point if necessary. It's not the
1757 most user-friendly message, but it's better than nothing. */
1758 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1759 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1760 TREE_OVERFLOW (t) = 1;
1761 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1762 && !MODE_HAS_NANS (TYPE_MODE (type)))
1763 TREE_OVERFLOW (t) = 1;
1764 /* Regular overflow, conversion produced an infinity in a mode that
1765 can't represent them. */
1766 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1767 && REAL_VALUE_ISINF (value)
1768 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1769 TREE_OVERFLOW (t) = 1;
1770 else
1771 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1772 return t;
1775 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1776 to a floating point type. */
1778 static tree
1779 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1781 REAL_VALUE_TYPE value;
1782 tree t;
1784 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1785 t = build_real (type, value);
1787 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1788 return t;
1791 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1792 to another fixed-point type. */
1794 static tree
1795 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1797 FIXED_VALUE_TYPE value;
1798 tree t;
1799 bool overflow_p;
1801 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1802 TYPE_SATURATING (type));
1803 t = build_fixed (type, value);
1805 /* Propagate overflow flags. */
1806 if (overflow_p | TREE_OVERFLOW (arg1))
1807 TREE_OVERFLOW (t) = 1;
1808 return t;
1811 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1812 to a fixed-point type. */
1814 static tree
1815 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1817 FIXED_VALUE_TYPE value;
1818 tree t;
1819 bool overflow_p;
1821 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1822 TREE_INT_CST (arg1),
1823 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1824 TYPE_SATURATING (type));
1825 t = build_fixed (type, value);
1827 /* Propagate overflow flags. */
1828 if (overflow_p | TREE_OVERFLOW (arg1))
1829 TREE_OVERFLOW (t) = 1;
1830 return t;
1833 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1834 to a fixed-point type. */
1836 static tree
1837 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1839 FIXED_VALUE_TYPE value;
1840 tree t;
1841 bool overflow_p;
1843 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1844 &TREE_REAL_CST (arg1),
1845 TYPE_SATURATING (type));
1846 t = build_fixed (type, value);
1848 /* Propagate overflow flags. */
1849 if (overflow_p | TREE_OVERFLOW (arg1))
1850 TREE_OVERFLOW (t) = 1;
1851 return t;
1854 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1855 type TYPE. If no simplification can be done return NULL_TREE. */
1857 static tree
1858 fold_convert_const (enum tree_code code, tree type, tree arg1)
1860 if (TREE_TYPE (arg1) == type)
1861 return arg1;
1863 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1864 || TREE_CODE (type) == OFFSET_TYPE)
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return fold_convert_const_int_from_int (type, arg1);
1868 else if (TREE_CODE (arg1) == REAL_CST)
1869 return fold_convert_const_int_from_real (code, type, arg1);
1870 else if (TREE_CODE (arg1) == FIXED_CST)
1871 return fold_convert_const_int_from_fixed (type, arg1);
1873 else if (TREE_CODE (type) == REAL_TYPE)
1875 if (TREE_CODE (arg1) == INTEGER_CST)
1876 return build_real_from_int_cst (type, arg1);
1877 else if (TREE_CODE (arg1) == REAL_CST)
1878 return fold_convert_const_real_from_real (type, arg1);
1879 else if (TREE_CODE (arg1) == FIXED_CST)
1880 return fold_convert_const_real_from_fixed (type, arg1);
1882 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1884 if (TREE_CODE (arg1) == FIXED_CST)
1885 return fold_convert_const_fixed_from_fixed (type, arg1);
1886 else if (TREE_CODE (arg1) == INTEGER_CST)
1887 return fold_convert_const_fixed_from_int (type, arg1);
1888 else if (TREE_CODE (arg1) == REAL_CST)
1889 return fold_convert_const_fixed_from_real (type, arg1);
1891 return NULL_TREE;
1894 /* Construct a vector of zero elements of vector type TYPE. */
1896 static tree
1897 build_zero_vector (tree type)
1899 tree t;
1901 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1902 return build_vector_from_val (type, t);
1905 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1907 bool
1908 fold_convertible_p (const_tree type, const_tree arg)
1910 tree orig = TREE_TYPE (arg);
1912 if (type == orig)
1913 return true;
1915 if (TREE_CODE (arg) == ERROR_MARK
1916 || TREE_CODE (type) == ERROR_MARK
1917 || TREE_CODE (orig) == ERROR_MARK)
1918 return false;
1920 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1921 return true;
1923 switch (TREE_CODE (type))
1925 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1926 case POINTER_TYPE: case REFERENCE_TYPE:
1927 case OFFSET_TYPE:
1928 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1929 || TREE_CODE (orig) == OFFSET_TYPE)
1930 return true;
1931 return (TREE_CODE (orig) == VECTOR_TYPE
1932 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1934 case REAL_TYPE:
1935 case FIXED_POINT_TYPE:
1936 case COMPLEX_TYPE:
1937 case VECTOR_TYPE:
1938 case VOID_TYPE:
1939 return TREE_CODE (type) == TREE_CODE (orig);
1941 default:
1942 return false;
1946 /* Convert expression ARG to type TYPE. Used by the middle-end for
1947 simple conversions in preference to calling the front-end's convert. */
1949 tree
1950 fold_convert_loc (location_t loc, tree type, tree arg)
1952 tree orig = TREE_TYPE (arg);
1953 tree tem;
1955 if (type == orig)
1956 return arg;
1958 if (TREE_CODE (arg) == ERROR_MARK
1959 || TREE_CODE (type) == ERROR_MARK
1960 || TREE_CODE (orig) == ERROR_MARK)
1961 return error_mark_node;
1963 switch (TREE_CODE (type))
1965 case POINTER_TYPE:
1966 case REFERENCE_TYPE:
1967 /* Handle conversions between pointers to different address spaces. */
1968 if (POINTER_TYPE_P (orig)
1969 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1970 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1971 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1972 /* fall through */
1974 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1975 case OFFSET_TYPE:
1976 if (TREE_CODE (arg) == INTEGER_CST)
1978 tem = fold_convert_const (NOP_EXPR, type, arg);
1979 if (tem != NULL_TREE)
1980 return tem;
1982 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1983 || TREE_CODE (orig) == OFFSET_TYPE)
1984 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1985 if (TREE_CODE (orig) == COMPLEX_TYPE)
1986 return fold_convert_loc (loc, type,
1987 fold_build1_loc (loc, REALPART_EXPR,
1988 TREE_TYPE (orig), arg));
1989 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1990 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1991 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1993 case REAL_TYPE:
1994 if (TREE_CODE (arg) == INTEGER_CST)
1996 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1997 if (tem != NULL_TREE)
1998 return tem;
2000 else if (TREE_CODE (arg) == REAL_CST)
2002 tem = fold_convert_const (NOP_EXPR, type, arg);
2003 if (tem != NULL_TREE)
2004 return tem;
2006 else if (TREE_CODE (arg) == FIXED_CST)
2008 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2013 switch (TREE_CODE (orig))
2015 case INTEGER_TYPE:
2016 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2017 case POINTER_TYPE: case REFERENCE_TYPE:
2018 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2020 case REAL_TYPE:
2021 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2023 case FIXED_POINT_TYPE:
2024 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2026 case COMPLEX_TYPE:
2027 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2028 return fold_convert_loc (loc, type, tem);
2030 default:
2031 gcc_unreachable ();
2034 case FIXED_POINT_TYPE:
2035 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2036 || TREE_CODE (arg) == REAL_CST)
2038 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2039 if (tem != NULL_TREE)
2040 goto fold_convert_exit;
2043 switch (TREE_CODE (orig))
2045 case FIXED_POINT_TYPE:
2046 case INTEGER_TYPE:
2047 case ENUMERAL_TYPE:
2048 case BOOLEAN_TYPE:
2049 case REAL_TYPE:
2050 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2052 case COMPLEX_TYPE:
2053 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2054 return fold_convert_loc (loc, type, tem);
2056 default:
2057 gcc_unreachable ();
2060 case COMPLEX_TYPE:
2061 switch (TREE_CODE (orig))
2063 case INTEGER_TYPE:
2064 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2065 case POINTER_TYPE: case REFERENCE_TYPE:
2066 case REAL_TYPE:
2067 case FIXED_POINT_TYPE:
2068 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2069 fold_convert_loc (loc, TREE_TYPE (type), arg),
2070 fold_convert_loc (loc, TREE_TYPE (type),
2071 integer_zero_node));
2072 case COMPLEX_TYPE:
2074 tree rpart, ipart;
2076 if (TREE_CODE (arg) == COMPLEX_EXPR)
2078 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2079 TREE_OPERAND (arg, 0));
2080 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2081 TREE_OPERAND (arg, 1));
2082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2089 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2090 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2093 default:
2094 gcc_unreachable ();
2097 case VECTOR_TYPE:
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2105 case VOID_TYPE:
2106 tem = fold_ignored_result (arg);
2107 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2109 default:
2110 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2111 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2112 gcc_unreachable ();
2114 fold_convert_exit:
2115 protected_set_expr_location_unshare (tem, loc);
2116 return tem;
2119 /* Return false if expr can be assumed not to be an lvalue, true
2120 otherwise. */
2122 static bool
2123 maybe_lvalue_p (const_tree x)
2125 /* We only need to wrap lvalue tree codes. */
2126 switch (TREE_CODE (x))
2128 case VAR_DECL:
2129 case PARM_DECL:
2130 case RESULT_DECL:
2131 case LABEL_DECL:
2132 case FUNCTION_DECL:
2133 case SSA_NAME:
2135 case COMPONENT_REF:
2136 case MEM_REF:
2137 case INDIRECT_REF:
2138 case ARRAY_REF:
2139 case ARRAY_RANGE_REF:
2140 case BIT_FIELD_REF:
2141 case OBJ_TYPE_REF:
2143 case REALPART_EXPR:
2144 case IMAGPART_EXPR:
2145 case PREINCREMENT_EXPR:
2146 case PREDECREMENT_EXPR:
2147 case SAVE_EXPR:
2148 case TRY_CATCH_EXPR:
2149 case WITH_CLEANUP_EXPR:
2150 case COMPOUND_EXPR:
2151 case MODIFY_EXPR:
2152 case TARGET_EXPR:
2153 case COND_EXPR:
2154 case BIND_EXPR:
2155 break;
2157 default:
2158 /* Assume the worst for front-end tree codes. */
2159 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2160 break;
2161 return false;
2164 return true;
2167 /* Return an expr equal to X but certainly not valid as an lvalue. */
2169 tree
2170 non_lvalue_loc (location_t loc, tree x)
2172 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2173 us. */
2174 if (in_gimple_form)
2175 return x;
2177 if (! maybe_lvalue_p (x))
2178 return x;
2179 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2183 Zero means allow extended lvalues. */
2185 int pedantic_lvalues;
2187 /* When pedantic, return an expr equal to X but certainly not valid as a
2188 pedantic lvalue. Otherwise, return X. */
2190 static tree
2191 pedantic_non_lvalue_loc (location_t loc, tree x)
2193 if (pedantic_lvalues)
2194 return non_lvalue_loc (loc, x);
2196 return protected_set_expr_location_unshare (x, loc);
2199 /* Given a tree comparison code, return the code that is the logical inverse.
2200 It is generally not safe to do this for floating-point comparisons, except
2201 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2202 ERROR_MARK in this case. */
2204 enum tree_code
2205 invert_tree_comparison (enum tree_code code, bool honor_nans)
2207 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2208 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2209 return ERROR_MARK;
2211 switch (code)
2213 case EQ_EXPR:
2214 return NE_EXPR;
2215 case NE_EXPR:
2216 return EQ_EXPR;
2217 case GT_EXPR:
2218 return honor_nans ? UNLE_EXPR : LE_EXPR;
2219 case GE_EXPR:
2220 return honor_nans ? UNLT_EXPR : LT_EXPR;
2221 case LT_EXPR:
2222 return honor_nans ? UNGE_EXPR : GE_EXPR;
2223 case LE_EXPR:
2224 return honor_nans ? UNGT_EXPR : GT_EXPR;
2225 case LTGT_EXPR:
2226 return UNEQ_EXPR;
2227 case UNEQ_EXPR:
2228 return LTGT_EXPR;
2229 case UNGT_EXPR:
2230 return LE_EXPR;
2231 case UNGE_EXPR:
2232 return LT_EXPR;
2233 case UNLT_EXPR:
2234 return GE_EXPR;
2235 case UNLE_EXPR:
2236 return GT_EXPR;
2237 case ORDERED_EXPR:
2238 return UNORDERED_EXPR;
2239 case UNORDERED_EXPR:
2240 return ORDERED_EXPR;
2241 default:
2242 gcc_unreachable ();
2246 /* Similar, but return the comparison that results if the operands are
2247 swapped. This is safe for floating-point. */
2249 enum tree_code
2250 swap_tree_comparison (enum tree_code code)
2252 switch (code)
2254 case EQ_EXPR:
2255 case NE_EXPR:
2256 case ORDERED_EXPR:
2257 case UNORDERED_EXPR:
2258 case LTGT_EXPR:
2259 case UNEQ_EXPR:
2260 return code;
2261 case GT_EXPR:
2262 return LT_EXPR;
2263 case GE_EXPR:
2264 return LE_EXPR;
2265 case LT_EXPR:
2266 return GT_EXPR;
2267 case LE_EXPR:
2268 return GE_EXPR;
2269 case UNGT_EXPR:
2270 return UNLT_EXPR;
2271 case UNGE_EXPR:
2272 return UNLE_EXPR;
2273 case UNLT_EXPR:
2274 return UNGT_EXPR;
2275 case UNLE_EXPR:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2283 /* Convert a comparison tree code from an enum tree_code representation
2284 into a compcode bit-based encoding. This function is the inverse of
2285 compcode_to_comparison. */
2287 static enum comparison_code
2288 comparison_to_compcode (enum tree_code code)
2290 switch (code)
2292 case LT_EXPR:
2293 return COMPCODE_LT;
2294 case EQ_EXPR:
2295 return COMPCODE_EQ;
2296 case LE_EXPR:
2297 return COMPCODE_LE;
2298 case GT_EXPR:
2299 return COMPCODE_GT;
2300 case NE_EXPR:
2301 return COMPCODE_NE;
2302 case GE_EXPR:
2303 return COMPCODE_GE;
2304 case ORDERED_EXPR:
2305 return COMPCODE_ORD;
2306 case UNORDERED_EXPR:
2307 return COMPCODE_UNORD;
2308 case UNLT_EXPR:
2309 return COMPCODE_UNLT;
2310 case UNEQ_EXPR:
2311 return COMPCODE_UNEQ;
2312 case UNLE_EXPR:
2313 return COMPCODE_UNLE;
2314 case UNGT_EXPR:
2315 return COMPCODE_UNGT;
2316 case LTGT_EXPR:
2317 return COMPCODE_LTGT;
2318 case UNGE_EXPR:
2319 return COMPCODE_UNGE;
2320 default:
2321 gcc_unreachable ();
2325 /* Convert a compcode bit-based encoding of a comparison operator back
2326 to GCC's enum tree_code representation. This function is the
2327 inverse of comparison_to_compcode. */
2329 static enum tree_code
2330 compcode_to_comparison (enum comparison_code code)
2332 switch (code)
2334 case COMPCODE_LT:
2335 return LT_EXPR;
2336 case COMPCODE_EQ:
2337 return EQ_EXPR;
2338 case COMPCODE_LE:
2339 return LE_EXPR;
2340 case COMPCODE_GT:
2341 return GT_EXPR;
2342 case COMPCODE_NE:
2343 return NE_EXPR;
2344 case COMPCODE_GE:
2345 return GE_EXPR;
2346 case COMPCODE_ORD:
2347 return ORDERED_EXPR;
2348 case COMPCODE_UNORD:
2349 return UNORDERED_EXPR;
2350 case COMPCODE_UNLT:
2351 return UNLT_EXPR;
2352 case COMPCODE_UNEQ:
2353 return UNEQ_EXPR;
2354 case COMPCODE_UNLE:
2355 return UNLE_EXPR;
2356 case COMPCODE_UNGT:
2357 return UNGT_EXPR;
2358 case COMPCODE_LTGT:
2359 return LTGT_EXPR;
2360 case COMPCODE_UNGE:
2361 return UNGE_EXPR;
2362 default:
2363 gcc_unreachable ();
2367 /* Return a tree for the comparison which is the combination of
2368 doing the AND or OR (depending on CODE) of the two operations LCODE
2369 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2370 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2371 if this makes the transformation invalid. */
2373 tree
2374 combine_comparisons (location_t loc,
2375 enum tree_code code, enum tree_code lcode,
2376 enum tree_code rcode, tree truth_type,
2377 tree ll_arg, tree lr_arg)
2379 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2380 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2381 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2382 int compcode;
2384 switch (code)
2386 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2387 compcode = lcompcode & rcompcode;
2388 break;
2390 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2391 compcode = lcompcode | rcompcode;
2392 break;
2394 default:
2395 return NULL_TREE;
2398 if (!honor_nans)
2400 /* Eliminate unordered comparisons, as well as LTGT and ORD
2401 which are not used unless the mode has NaNs. */
2402 compcode &= ~COMPCODE_UNORD;
2403 if (compcode == COMPCODE_LTGT)
2404 compcode = COMPCODE_NE;
2405 else if (compcode == COMPCODE_ORD)
2406 compcode = COMPCODE_TRUE;
2408 else if (flag_trapping_math)
2410 /* Check that the original operation and the optimized ones will trap
2411 under the same condition. */
2412 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2413 && (lcompcode != COMPCODE_EQ)
2414 && (lcompcode != COMPCODE_ORD);
2415 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2416 && (rcompcode != COMPCODE_EQ)
2417 && (rcompcode != COMPCODE_ORD);
2418 bool trap = (compcode & COMPCODE_UNORD) == 0
2419 && (compcode != COMPCODE_EQ)
2420 && (compcode != COMPCODE_ORD);
2422 /* In a short-circuited boolean expression the LHS might be
2423 such that the RHS, if evaluated, will never trap. For
2424 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2425 if neither x nor y is NaN. (This is a mixed blessing: for
2426 example, the expression above will never trap, hence
2427 optimizing it to x < y would be invalid). */
2428 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2429 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2430 rtrap = false;
2432 /* If the comparison was short-circuited, and only the RHS
2433 trapped, we may now generate a spurious trap. */
2434 if (rtrap && !ltrap
2435 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2436 return NULL_TREE;
2438 /* If we changed the conditions that cause a trap, we lose. */
2439 if ((ltrap || rtrap) != trap)
2440 return NULL_TREE;
2443 if (compcode == COMPCODE_TRUE)
2444 return constant_boolean_node (true, truth_type);
2445 else if (compcode == COMPCODE_FALSE)
2446 return constant_boolean_node (false, truth_type);
2447 else
2449 enum tree_code tcode;
2451 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2452 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2456 /* Return nonzero if two operands (typically of the same tree node)
2457 are necessarily equal. If either argument has side-effects this
2458 function returns zero. FLAGS modifies behavior as follows:
2460 If OEP_ONLY_CONST is set, only return nonzero for constants.
2461 This function tests whether the operands are indistinguishable;
2462 it does not test whether they are equal using C's == operation.
2463 The distinction is important for IEEE floating point, because
2464 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2465 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2467 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2468 even though it may hold multiple values during a function.
2469 This is because a GCC tree node guarantees that nothing else is
2470 executed between the evaluation of its "operands" (which may often
2471 be evaluated in arbitrary order). Hence if the operands themselves
2472 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2473 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2474 unset means assuming isochronic (or instantaneous) tree equivalence.
2475 Unless comparing arbitrary expression trees, such as from different
2476 statements, this flag can usually be left unset.
2478 If OEP_PURE_SAME is set, then pure functions with identical arguments
2479 are considered the same. It is used when the caller has other ways
2480 to ensure that global memory is unchanged in between.
2482 If OEP_ALLOW_NULL is set, this routine will not crash on NULL operands,
2483 and two NULL operands are considered equal. This flag is usually set
2484 in the context of frontend when ARG0 and/or ARG1 may be NULL mostly due
2485 to recursion on partially built expressions (e.g. a CAST_EXPR on a NULL
2486 tree.) In this case, we certainly don't want the compiler to crash and
2487 it's OK to consider two NULL operands equal. On the other hand, when
2488 called in the context of code generation and optimization, if NULL
2489 operands are not expected, silently ignoring them could be dangerous
2490 and might cause problems downstream that are hard to find/debug. In that
2491 case, the flag should probably not be set. */
2494 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2496 /* If either is NULL, they must be both NULL to be equal. We only do this
2497 check when OEP_ALLOW_NULL is set. */
2498 if ((flags & OEP_ALLOW_NULL) && (!arg0 || !arg1))
2499 return arg0 == arg1;
2501 /* If either is ERROR_MARK, they aren't equal. */
2502 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2503 || TREE_TYPE (arg0) == error_mark_node
2504 || TREE_TYPE (arg1) == error_mark_node)
2505 return 0;
2507 /* Similar, if either does not have a type (like a released SSA name),
2508 they aren't equal. */
2509 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2511 /* If the caller chooses to allow the comparison of operands without
2512 types, we will continue the comparison only when both of them don't
2513 have a type. */
2514 if (!(flags & OEP_ALLOW_NO_TYPE) || TREE_TYPE (arg0) || TREE_TYPE (arg1))
2515 return 0;
2518 /* Check equality of integer constants before bailing out due to
2519 precision differences. */
2520 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2521 return tree_int_cst_equal (arg0, arg1);
2523 /* If both types don't have the same signedness, then we can't consider
2524 them equal. We must check this before the STRIP_NOPS calls
2525 because they may change the signedness of the arguments. As pointers
2526 strictly don't have a signedness, require either two pointers or
2527 two non-pointers as well. */
2528 if (TREE_TYPE (arg0)
2529 && (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2530 || POINTER_TYPE_P (TREE_TYPE (arg0))
2531 != POINTER_TYPE_P (TREE_TYPE (arg1))))
2532 return 0;
2534 /* We cannot consider pointers to different address space equal. */
2535 if (TREE_TYPE (arg0)
2536 && (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2537 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2538 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))))
2539 return 0;
2541 /* If both types don't have the same precision, then it is not safe
2542 to strip NOPs. */
2543 if (element_precision (TREE_TYPE (arg0))
2544 != element_precision (TREE_TYPE (arg1)))
2545 return 0;
2547 STRIP_NOPS (arg0);
2548 STRIP_NOPS (arg1);
2550 /* In case both args are comparisons but with different comparison
2551 code, try to swap the comparison operands of one arg to produce
2552 a match and compare that variant. */
2553 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2554 && COMPARISON_CLASS_P (arg0)
2555 && COMPARISON_CLASS_P (arg1))
2557 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2559 if (TREE_CODE (arg0) == swap_code)
2560 return operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags);
2566 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2567 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2568 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2569 return 0;
2571 /* This is needed for conversions and for COMPONENT_REF.
2572 Might as well play it safe and always test this. */
2573 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2574 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2575 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2576 return 0;
2578 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2579 We don't care about side effects in that case because the SAVE_EXPR
2580 takes care of that for us. In all other cases, two expressions are
2581 equal if they have no side effects. If we have two identical
2582 expressions with side effects that should be treated the same due
2583 to the only side effects being identical SAVE_EXPR's, that will
2584 be detected in the recursive calls below.
2585 If we are taking an invariant address of two identical objects
2586 they are necessarily equal as well. */
2587 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2588 && (TREE_CODE (arg0) == SAVE_EXPR
2589 || (flags & OEP_CONSTANT_ADDRESS_OF)
2590 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2591 return 1;
2593 /* Next handle constant cases, those for which we can return 1 even
2594 if ONLY_CONST is set. */
2595 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2596 switch (TREE_CODE (arg0))
2598 case INTEGER_CST:
2599 return tree_int_cst_equal (arg0, arg1);
2601 case FIXED_CST:
2602 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2603 TREE_FIXED_CST (arg1));
2605 case REAL_CST:
2606 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2607 TREE_REAL_CST (arg1)))
2608 return 1;
2611 if (TREE_TYPE (arg0)
2612 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2614 /* If we do not distinguish between signed and unsigned zero,
2615 consider them equal. */
2616 if (real_zerop (arg0) && real_zerop (arg1))
2617 return 1;
2619 return 0;
2621 case VECTOR_CST:
2623 unsigned i;
2625 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2626 return 0;
2628 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2630 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2631 VECTOR_CST_ELT (arg1, i), flags))
2632 return 0;
2634 return 1;
2637 case COMPLEX_CST:
2638 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2639 flags)
2640 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2641 flags));
2643 case STRING_CST:
2644 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2645 && ! memcmp (TREE_STRING_POINTER (arg0),
2646 TREE_STRING_POINTER (arg1),
2647 TREE_STRING_LENGTH (arg0)));
2649 case ADDR_EXPR:
2650 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2651 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2652 ? OEP_CONSTANT_ADDRESS_OF : 0);
2653 default:
2654 break;
2657 if (flags & OEP_ONLY_CONST)
2658 return 0;
2660 /* Define macros to test an operand from arg0 and arg1 for equality and a
2661 variant that allows null and views null as being different from any
2662 non-null value. In the latter case, if either is null, the both
2663 must be; otherwise, do the normal comparison. */
2664 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2665 TREE_OPERAND (arg1, N), flags)
2667 #define OP_SAME_WITH_NULL(N) \
2668 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2669 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2671 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2673 case tcc_unary:
2674 /* Two conversions are equal only if signedness and modes match. */
2675 switch (TREE_CODE (arg0))
2677 CASE_CONVERT:
2678 case FIX_TRUNC_EXPR:
2679 if (TREE_TYPE (arg0)
2680 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2681 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2682 return 0;
2683 break;
2684 default:
2685 break;
2688 return OP_SAME (0);
2691 case tcc_comparison:
2692 case tcc_binary:
2693 if (OP_SAME (0) && OP_SAME (1))
2694 return 1;
2696 /* For commutative ops, allow the other order. */
2697 return (commutative_tree_code (TREE_CODE (arg0))
2698 && operand_equal_p (TREE_OPERAND (arg0, 0),
2699 TREE_OPERAND (arg1, 1), flags)
2700 && operand_equal_p (TREE_OPERAND (arg0, 1),
2701 TREE_OPERAND (arg1, 0), flags));
2703 case tcc_reference:
2704 /* If either of the pointer (or reference) expressions we are
2705 dereferencing contain a side effect, these cannot be equal,
2706 but their addresses can be. */
2707 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2708 && (TREE_SIDE_EFFECTS (arg0)
2709 || TREE_SIDE_EFFECTS (arg1)))
2710 return 0;
2712 switch (TREE_CODE (arg0))
2714 case INDIRECT_REF:
2715 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2716 return OP_SAME (0);
2718 case REALPART_EXPR:
2719 case IMAGPART_EXPR:
2720 return OP_SAME (0);
2722 case TARGET_MEM_REF:
2723 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2724 /* Require equal extra operands and then fall through to MEM_REF
2725 handling of the two common operands. */
2726 if (!OP_SAME_WITH_NULL (2)
2727 || !OP_SAME_WITH_NULL (3)
2728 || !OP_SAME_WITH_NULL (4))
2729 return 0;
2730 /* Fallthru. */
2731 case MEM_REF:
2732 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2733 /* Require equal access sizes, and similar pointer types.
2734 We can have incomplete types for array references of
2735 variable-sized arrays from the Fortran frontend
2736 though. Also verify the types are compatible. */
2737 return (TREE_TYPE (arg0)
2738 && (TYPE_SIZE (TREE_TYPE (arg0))
2739 == TYPE_SIZE (TREE_TYPE (arg1))
2740 || (TYPE_SIZE (TREE_TYPE (arg0))
2741 && TYPE_SIZE (TREE_TYPE (arg1))
2742 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2743 TYPE_SIZE (TREE_TYPE (arg1)),
2744 flags)))
2745 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2746 && alias_ptr_types_compatible_p
2747 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2748 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2749 && OP_SAME (0) && OP_SAME (1));
2751 case ARRAY_REF:
2752 case ARRAY_RANGE_REF:
2753 /* Operands 2 and 3 may be null.
2754 Compare the array index by value if it is constant first as we
2755 may have different types but same value here. */
2756 if (!OP_SAME (0))
2757 return 0;
2758 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2759 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2760 TREE_OPERAND (arg1, 1))
2761 || OP_SAME (1))
2762 && OP_SAME_WITH_NULL (2)
2763 && OP_SAME_WITH_NULL (3));
2765 case COMPONENT_REF:
2766 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2767 may be NULL when we're called to compare MEM_EXPRs. */
2768 if (!OP_SAME_WITH_NULL (0)
2769 || !OP_SAME (1))
2770 return 0;
2771 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2772 return OP_SAME_WITH_NULL (2);
2774 case BIT_FIELD_REF:
2775 if (!OP_SAME (0))
2776 return 0;
2777 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2778 return OP_SAME (1) && OP_SAME (2);
2780 default:
2781 return 0;
2784 case tcc_expression:
2785 switch (TREE_CODE (arg0))
2787 case ADDR_EXPR:
2788 case TRUTH_NOT_EXPR:
2789 return OP_SAME (0);
2791 case TRUTH_ANDIF_EXPR:
2792 case TRUTH_ORIF_EXPR:
2793 return OP_SAME (0) && OP_SAME (1);
2795 case FMA_EXPR:
2796 case WIDEN_MULT_PLUS_EXPR:
2797 case WIDEN_MULT_MINUS_EXPR:
2798 if (!OP_SAME (2))
2799 return 0;
2800 /* The multiplcation operands are commutative. */
2801 /* FALLTHRU */
2803 case TRUTH_AND_EXPR:
2804 case TRUTH_OR_EXPR:
2805 case TRUTH_XOR_EXPR:
2806 if (OP_SAME (0) && OP_SAME (1))
2807 return 1;
2809 /* Otherwise take into account this is a commutative operation. */
2810 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2811 TREE_OPERAND (arg1, 1), flags)
2812 && operand_equal_p (TREE_OPERAND (arg0, 1),
2813 TREE_OPERAND (arg1, 0), flags));
2815 case COND_EXPR:
2816 case VEC_COND_EXPR:
2817 case DOT_PROD_EXPR:
2818 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820 default:
2821 return 0;
2824 case tcc_vl_exp:
2825 switch (TREE_CODE (arg0))
2827 case CALL_EXPR:
2828 /* If the CALL_EXPRs call different functions, then they
2829 clearly can not be equal. */
2830 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2831 flags))
2832 return 0;
2835 unsigned int cef = call_expr_flags (arg0);
2836 if (flags & OEP_PURE_SAME)
2837 cef &= ECF_CONST | ECF_PURE;
2838 else
2839 cef &= ECF_CONST;
2840 if (!cef)
2841 return 0;
2844 /* Now see if all the arguments are the same. */
2846 const_call_expr_arg_iterator iter0, iter1;
2847 const_tree a0, a1;
2848 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2849 a1 = first_const_call_expr_arg (arg1, &iter1);
2850 a0 && a1;
2851 a0 = next_const_call_expr_arg (&iter0),
2852 a1 = next_const_call_expr_arg (&iter1))
2853 if (! operand_equal_p (a0, a1, flags))
2854 return 0;
2856 /* If we get here and both argument lists are exhausted
2857 then the CALL_EXPRs are equal. */
2858 return ! (a0 || a1);
2860 default:
2861 return 0;
2864 case tcc_declaration:
2865 /* Consider __builtin_sqrt equal to sqrt. */
2866 return (TREE_CODE (arg0) == FUNCTION_DECL
2867 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2868 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2869 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2871 default:
2872 return 0;
2875 #undef OP_SAME
2876 #undef OP_SAME_WITH_NULL
2879 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2880 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2882 When in doubt, return 0. */
2884 static int
2885 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2887 int unsignedp1, unsignedpo;
2888 tree primarg0, primarg1, primother;
2889 unsigned int correct_width;
2891 if (operand_equal_p (arg0, arg1, 0))
2892 return 1;
2894 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2895 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2896 return 0;
2898 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2899 and see if the inner values are the same. This removes any
2900 signedness comparison, which doesn't matter here. */
2901 primarg0 = arg0, primarg1 = arg1;
2902 STRIP_NOPS (primarg0);
2903 STRIP_NOPS (primarg1);
2904 if (operand_equal_p (primarg0, primarg1, 0))
2905 return 1;
2907 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2908 actual comparison operand, ARG0.
2910 First throw away any conversions to wider types
2911 already present in the operands. */
2913 primarg1 = get_narrower (arg1, &unsignedp1);
2914 primother = get_narrower (other, &unsignedpo);
2916 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2917 if (unsignedp1 == unsignedpo
2918 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2919 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2921 tree type = TREE_TYPE (arg0);
2923 /* Make sure shorter operand is extended the right way
2924 to match the longer operand. */
2925 primarg1 = fold_convert (signed_or_unsigned_type_for
2926 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2928 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2929 return 1;
2932 return 0;
2935 /* See if ARG is an expression that is either a comparison or is performing
2936 arithmetic on comparisons. The comparisons must only be comparing
2937 two different values, which will be stored in *CVAL1 and *CVAL2; if
2938 they are nonzero it means that some operands have already been found.
2939 No variables may be used anywhere else in the expression except in the
2940 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2941 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2943 If this is true, return 1. Otherwise, return zero. */
2945 static int
2946 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2948 enum tree_code code = TREE_CODE (arg);
2949 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2951 /* We can handle some of the tcc_expression cases here. */
2952 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2953 tclass = tcc_unary;
2954 else if (tclass == tcc_expression
2955 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2956 || code == COMPOUND_EXPR))
2957 tclass = tcc_binary;
2959 else if (tclass == tcc_expression && code == SAVE_EXPR
2960 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2962 /* If we've already found a CVAL1 or CVAL2, this expression is
2963 two complex to handle. */
2964 if (*cval1 || *cval2)
2965 return 0;
2967 tclass = tcc_unary;
2968 *save_p = 1;
2971 switch (tclass)
2973 case tcc_unary:
2974 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2976 case tcc_binary:
2977 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2978 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2979 cval1, cval2, save_p));
2981 case tcc_constant:
2982 return 1;
2984 case tcc_expression:
2985 if (code == COND_EXPR)
2986 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2987 cval1, cval2, save_p)
2988 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2989 cval1, cval2, save_p)
2990 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2991 cval1, cval2, save_p));
2992 return 0;
2994 case tcc_comparison:
2995 /* First see if we can handle the first operand, then the second. For
2996 the second operand, we know *CVAL1 can't be zero. It must be that
2997 one side of the comparison is each of the values; test for the
2998 case where this isn't true by failing if the two operands
2999 are the same. */
3001 if (operand_equal_p (TREE_OPERAND (arg, 0),
3002 TREE_OPERAND (arg, 1), 0))
3003 return 0;
3005 if (*cval1 == 0)
3006 *cval1 = TREE_OPERAND (arg, 0);
3007 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3009 else if (*cval2 == 0)
3010 *cval2 = TREE_OPERAND (arg, 0);
3011 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3013 else
3014 return 0;
3016 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3018 else if (*cval2 == 0)
3019 *cval2 = TREE_OPERAND (arg, 1);
3020 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3022 else
3023 return 0;
3025 return 1;
3027 default:
3028 return 0;
3032 /* ARG is a tree that is known to contain just arithmetic operations and
3033 comparisons. Evaluate the operations in the tree substituting NEW0 for
3034 any occurrence of OLD0 as an operand of a comparison and likewise for
3035 NEW1 and OLD1. */
3037 static tree
3038 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3039 tree old1, tree new1)
3041 tree type = TREE_TYPE (arg);
3042 enum tree_code code = TREE_CODE (arg);
3043 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3045 /* We can handle some of the tcc_expression cases here. */
3046 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3047 tclass = tcc_unary;
3048 else if (tclass == tcc_expression
3049 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3050 tclass = tcc_binary;
3052 switch (tclass)
3054 case tcc_unary:
3055 return fold_build1_loc (loc, code, type,
3056 eval_subst (loc, TREE_OPERAND (arg, 0),
3057 old0, new0, old1, new1));
3059 case tcc_binary:
3060 return fold_build2_loc (loc, code, type,
3061 eval_subst (loc, TREE_OPERAND (arg, 0),
3062 old0, new0, old1, new1),
3063 eval_subst (loc, TREE_OPERAND (arg, 1),
3064 old0, new0, old1, new1));
3066 case tcc_expression:
3067 switch (code)
3069 case SAVE_EXPR:
3070 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3071 old1, new1);
3073 case COMPOUND_EXPR:
3074 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3075 old1, new1);
3077 case COND_EXPR:
3078 return fold_build3_loc (loc, code, type,
3079 eval_subst (loc, TREE_OPERAND (arg, 0),
3080 old0, new0, old1, new1),
3081 eval_subst (loc, TREE_OPERAND (arg, 1),
3082 old0, new0, old1, new1),
3083 eval_subst (loc, TREE_OPERAND (arg, 2),
3084 old0, new0, old1, new1));
3085 default:
3086 break;
3088 /* Fall through - ??? */
3090 case tcc_comparison:
3092 tree arg0 = TREE_OPERAND (arg, 0);
3093 tree arg1 = TREE_OPERAND (arg, 1);
3095 /* We need to check both for exact equality and tree equality. The
3096 former will be true if the operand has a side-effect. In that
3097 case, we know the operand occurred exactly once. */
3099 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3100 arg0 = new0;
3101 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3102 arg0 = new1;
3104 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3105 arg1 = new0;
3106 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3107 arg1 = new1;
3109 return fold_build2_loc (loc, code, type, arg0, arg1);
3112 default:
3113 return arg;
3117 /* Return a tree for the case when the result of an expression is RESULT
3118 converted to TYPE and OMITTED was previously an operand of the expression
3119 but is now not needed (e.g., we folded OMITTED * 0).
3121 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3122 the conversion of RESULT to TYPE. */
3124 tree
3125 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3127 tree t = fold_convert_loc (loc, type, result);
3129 /* If the resulting operand is an empty statement, just return the omitted
3130 statement casted to void. */
3131 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3132 return build1_loc (loc, NOP_EXPR, void_type_node,
3133 fold_ignored_result (omitted));
3135 if (TREE_SIDE_EFFECTS (omitted))
3136 return build2_loc (loc, COMPOUND_EXPR, type,
3137 fold_ignored_result (omitted), t);
3139 return non_lvalue_loc (loc, t);
3142 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3144 static tree
3145 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3146 tree omitted)
3148 tree t = fold_convert_loc (loc, type, result);
3150 /* If the resulting operand is an empty statement, just return the omitted
3151 statement casted to void. */
3152 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3153 return build1_loc (loc, NOP_EXPR, void_type_node,
3154 fold_ignored_result (omitted));
3156 if (TREE_SIDE_EFFECTS (omitted))
3157 return build2_loc (loc, COMPOUND_EXPR, type,
3158 fold_ignored_result (omitted), t);
3160 return pedantic_non_lvalue_loc (loc, t);
3163 /* Return a tree for the case when the result of an expression is RESULT
3164 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3165 of the expression but are now not needed.
3167 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3168 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3169 evaluated before OMITTED2. Otherwise, if neither has side effects,
3170 just do the conversion of RESULT to TYPE. */
3172 tree
3173 omit_two_operands_loc (location_t loc, tree type, tree result,
3174 tree omitted1, tree omitted2)
3176 tree t = fold_convert_loc (loc, type, result);
3178 if (TREE_SIDE_EFFECTS (omitted2))
3179 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3180 if (TREE_SIDE_EFFECTS (omitted1))
3181 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3183 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3187 /* Return a simplified tree node for the truth-negation of ARG. This
3188 never alters ARG itself. We assume that ARG is an operation that
3189 returns a truth value (0 or 1).
3191 FIXME: one would think we would fold the result, but it causes
3192 problems with the dominator optimizer. */
3194 static tree
3195 fold_truth_not_expr (location_t loc, tree arg)
3197 tree type = TREE_TYPE (arg);
3198 enum tree_code code = TREE_CODE (arg);
3199 location_t loc1, loc2;
3201 /* If this is a comparison, we can simply invert it, except for
3202 floating-point non-equality comparisons, in which case we just
3203 enclose a TRUTH_NOT_EXPR around what we have. */
3205 if (TREE_CODE_CLASS (code) == tcc_comparison)
3207 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3208 if (FLOAT_TYPE_P (op_type)
3209 && flag_trapping_math
3210 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3211 && code != NE_EXPR && code != EQ_EXPR)
3212 return NULL_TREE;
3214 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3215 if (code == ERROR_MARK)
3216 return NULL_TREE;
3218 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3219 TREE_OPERAND (arg, 1));
3222 switch (code)
3224 case INTEGER_CST:
3225 return constant_boolean_node (integer_zerop (arg), type);
3227 case TRUTH_AND_EXPR:
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3229 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3230 return build2_loc (loc, TRUTH_OR_EXPR, type,
3231 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3232 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3234 case TRUTH_OR_EXPR:
3235 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3236 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3237 return build2_loc (loc, TRUTH_AND_EXPR, type,
3238 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3239 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3241 case TRUTH_XOR_EXPR:
3242 /* Here we can invert either operand. We invert the first operand
3243 unless the second operand is a TRUTH_NOT_EXPR in which case our
3244 result is the XOR of the first operand with the inside of the
3245 negation of the second operand. */
3247 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3248 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3249 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3250 else
3251 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3252 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3253 TREE_OPERAND (arg, 1));
3255 case TRUTH_ANDIF_EXPR:
3256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3257 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3258 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3259 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3260 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3262 case TRUTH_ORIF_EXPR:
3263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3264 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3265 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3266 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3267 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3269 case TRUTH_NOT_EXPR:
3270 return TREE_OPERAND (arg, 0);
3272 case COND_EXPR:
3274 tree arg1 = TREE_OPERAND (arg, 1);
3275 tree arg2 = TREE_OPERAND (arg, 2);
3277 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3278 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3280 /* A COND_EXPR may have a throw as one operand, which
3281 then has void type. Just leave void operands
3282 as they are. */
3283 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3284 VOID_TYPE_P (TREE_TYPE (arg1))
3285 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3286 VOID_TYPE_P (TREE_TYPE (arg2))
3287 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3290 case COMPOUND_EXPR:
3291 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3292 return build2_loc (loc, COMPOUND_EXPR, type,
3293 TREE_OPERAND (arg, 0),
3294 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3296 case NON_LVALUE_EXPR:
3297 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3298 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3300 CASE_CONVERT:
3301 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3302 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3304 /* ... fall through ... */
3306 case FLOAT_EXPR:
3307 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3308 return build1_loc (loc, TREE_CODE (arg), type,
3309 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3311 case BIT_AND_EXPR:
3312 if (!integer_onep (TREE_OPERAND (arg, 1)))
3313 return NULL_TREE;
3314 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3316 case SAVE_EXPR:
3317 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3319 case CLEANUP_POINT_EXPR:
3320 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3321 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3322 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3324 default:
3325 return NULL_TREE;
3329 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3330 assume that ARG is an operation that returns a truth value (0 or 1
3331 for scalars, 0 or -1 for vectors). Return the folded expression if
3332 folding is successful. Otherwise, return NULL_TREE. */
3334 static tree
3335 fold_invert_truthvalue (location_t loc, tree arg)
3337 tree type = TREE_TYPE (arg);
3338 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3339 ? BIT_NOT_EXPR
3340 : TRUTH_NOT_EXPR,
3341 type, arg);
3344 /* Return a simplified tree node for the truth-negation of ARG. This
3345 never alters ARG itself. We assume that ARG is an operation that
3346 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3348 tree
3349 invert_truthvalue_loc (location_t loc, tree arg)
3351 if (TREE_CODE (arg) == ERROR_MARK)
3352 return arg;
3354 tree type = TREE_TYPE (arg);
3355 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3356 ? BIT_NOT_EXPR
3357 : TRUTH_NOT_EXPR,
3358 type, arg);
3361 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3362 operands are another bit-wise operation with a common input. If so,
3363 distribute the bit operations to save an operation and possibly two if
3364 constants are involved. For example, convert
3365 (A | B) & (A | C) into A | (B & C)
3366 Further simplification will occur if B and C are constants.
3368 If this optimization cannot be done, 0 will be returned. */
3370 static tree
3371 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3372 tree arg0, tree arg1)
3374 tree common;
3375 tree left, right;
3377 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3378 || TREE_CODE (arg0) == code
3379 || (TREE_CODE (arg0) != BIT_AND_EXPR
3380 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3381 return 0;
3383 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3385 common = TREE_OPERAND (arg0, 0);
3386 left = TREE_OPERAND (arg0, 1);
3387 right = TREE_OPERAND (arg1, 1);
3389 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3391 common = TREE_OPERAND (arg0, 0);
3392 left = TREE_OPERAND (arg0, 1);
3393 right = TREE_OPERAND (arg1, 0);
3395 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3397 common = TREE_OPERAND (arg0, 1);
3398 left = TREE_OPERAND (arg0, 0);
3399 right = TREE_OPERAND (arg1, 1);
3401 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3403 common = TREE_OPERAND (arg0, 1);
3404 left = TREE_OPERAND (arg0, 0);
3405 right = TREE_OPERAND (arg1, 0);
3407 else
3408 return 0;
3410 common = fold_convert_loc (loc, type, common);
3411 left = fold_convert_loc (loc, type, left);
3412 right = fold_convert_loc (loc, type, right);
3413 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3414 fold_build2_loc (loc, code, type, left, right));
3417 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3418 with code CODE. This optimization is unsafe. */
3419 static tree
3420 distribute_real_division (location_t loc, enum tree_code code, tree type,
3421 tree arg0, tree arg1)
3423 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3424 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3426 /* (A / C) +- (B / C) -> (A +- B) / C. */
3427 if (mul0 == mul1
3428 && operand_equal_p (TREE_OPERAND (arg0, 1),
3429 TREE_OPERAND (arg1, 1), 0))
3430 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3431 fold_build2_loc (loc, code, type,
3432 TREE_OPERAND (arg0, 0),
3433 TREE_OPERAND (arg1, 0)),
3434 TREE_OPERAND (arg0, 1));
3436 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3437 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3438 TREE_OPERAND (arg1, 0), 0)
3439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3440 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3442 REAL_VALUE_TYPE r0, r1;
3443 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3444 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3445 if (!mul0)
3446 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3447 if (!mul1)
3448 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3449 real_arithmetic (&r0, code, &r0, &r1);
3450 return fold_build2_loc (loc, MULT_EXPR, type,
3451 TREE_OPERAND (arg0, 0),
3452 build_real (type, r0));
3455 return NULL_TREE;
3458 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3459 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3461 static tree
3462 make_bit_field_ref (location_t loc, tree inner, tree type,
3463 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3465 tree result, bftype;
3467 if (bitpos == 0)
3469 tree size = TYPE_SIZE (TREE_TYPE (inner));
3470 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3471 || POINTER_TYPE_P (TREE_TYPE (inner)))
3472 && tree_fits_shwi_p (size)
3473 && tree_to_shwi (size) == bitsize)
3474 return fold_convert_loc (loc, type, inner);
3477 bftype = type;
3478 if (TYPE_PRECISION (bftype) != bitsize
3479 || TYPE_UNSIGNED (bftype) == !unsignedp)
3480 bftype = build_nonstandard_integer_type (bitsize, 0);
3482 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3483 size_int (bitsize), bitsize_int (bitpos));
3485 if (bftype != type)
3486 result = fold_convert_loc (loc, type, result);
3488 return result;
3491 /* Optimize a bit-field compare.
3493 There are two cases: First is a compare against a constant and the
3494 second is a comparison of two items where the fields are at the same
3495 bit position relative to the start of a chunk (byte, halfword, word)
3496 large enough to contain it. In these cases we can avoid the shift
3497 implicit in bitfield extractions.
3499 For constants, we emit a compare of the shifted constant with the
3500 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3501 compared. For two fields at the same position, we do the ANDs with the
3502 similar mask and compare the result of the ANDs.
3504 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3505 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3506 are the left and right operands of the comparison, respectively.
3508 If the optimization described above can be done, we return the resulting
3509 tree. Otherwise we return zero. */
3511 static tree
3512 optimize_bit_field_compare (location_t loc, enum tree_code code,
3513 tree compare_type, tree lhs, tree rhs)
3515 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3516 tree type = TREE_TYPE (lhs);
3517 tree signed_type, unsigned_type;
3518 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3519 enum machine_mode lmode, rmode, nmode;
3520 int lunsignedp, runsignedp;
3521 int lvolatilep = 0, rvolatilep = 0;
3522 tree linner, rinner = NULL_TREE;
3523 tree mask;
3524 tree offset;
3526 /* Get all the information about the extractions being done. If the bit size
3527 if the same as the size of the underlying object, we aren't doing an
3528 extraction at all and so can do nothing. We also don't want to
3529 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3530 then will no longer be able to replace it. */
3531 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3532 &lunsignedp, &lvolatilep, false);
3533 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3534 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3535 return 0;
3537 if (!const_p)
3539 /* If this is not a constant, we can only do something if bit positions,
3540 sizes, and signedness are the same. */
3541 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3542 &runsignedp, &rvolatilep, false);
3544 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3545 || lunsignedp != runsignedp || offset != 0
3546 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3547 return 0;
3550 /* See if we can find a mode to refer to this field. We should be able to,
3551 but fail if we can't. */
3552 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3553 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3554 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3555 TYPE_ALIGN (TREE_TYPE (rinner))),
3556 word_mode, false);
3557 if (nmode == VOIDmode)
3558 return 0;
3560 /* Set signed and unsigned types of the precision of this mode for the
3561 shifts below. */
3562 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3563 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3565 /* Compute the bit position and size for the new reference and our offset
3566 within it. If the new reference is the same size as the original, we
3567 won't optimize anything, so return zero. */
3568 nbitsize = GET_MODE_BITSIZE (nmode);
3569 nbitpos = lbitpos & ~ (nbitsize - 1);
3570 lbitpos -= nbitpos;
3571 if (nbitsize == lbitsize)
3572 return 0;
3574 if (BYTES_BIG_ENDIAN)
3575 lbitpos = nbitsize - lbitsize - lbitpos;
3577 /* Make the mask to be used against the extracted field. */
3578 mask = build_int_cst_type (unsigned_type, -1);
3579 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3580 mask = const_binop (RSHIFT_EXPR, mask,
3581 size_int (nbitsize - lbitsize - lbitpos));
3583 if (! const_p)
3584 /* If not comparing with constant, just rework the comparison
3585 and return. */
3586 return fold_build2_loc (loc, code, compare_type,
3587 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3588 make_bit_field_ref (loc, linner,
3589 unsigned_type,
3590 nbitsize, nbitpos,
3592 mask),
3593 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3594 make_bit_field_ref (loc, rinner,
3595 unsigned_type,
3596 nbitsize, nbitpos,
3598 mask));
3600 /* Otherwise, we are handling the constant case. See if the constant is too
3601 big for the field. Warn and return a tree of for 0 (false) if so. We do
3602 this not only for its own sake, but to avoid having to test for this
3603 error case below. If we didn't, we might generate wrong code.
3605 For unsigned fields, the constant shifted right by the field length should
3606 be all zero. For signed fields, the high-order bits should agree with
3607 the sign bit. */
3609 if (lunsignedp)
3611 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3612 fold_convert_loc (loc,
3613 unsigned_type, rhs),
3614 size_int (lbitsize))))
3616 warning (0, "comparison is always %d due to width of bit-field",
3617 code == NE_EXPR);
3618 return constant_boolean_node (code == NE_EXPR, compare_type);
3621 else
3623 tree tem = const_binop (RSHIFT_EXPR,
3624 fold_convert_loc (loc, signed_type, rhs),
3625 size_int (lbitsize - 1));
3626 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3628 warning (0, "comparison is always %d due to width of bit-field",
3629 code == NE_EXPR);
3630 return constant_boolean_node (code == NE_EXPR, compare_type);
3634 /* Single-bit compares should always be against zero. */
3635 if (lbitsize == 1 && ! integer_zerop (rhs))
3637 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3638 rhs = build_int_cst (type, 0);
3641 /* Make a new bitfield reference, shift the constant over the
3642 appropriate number of bits and mask it with the computed mask
3643 (in case this was a signed field). If we changed it, make a new one. */
3644 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3646 rhs = const_binop (BIT_AND_EXPR,
3647 const_binop (LSHIFT_EXPR,
3648 fold_convert_loc (loc, unsigned_type, rhs),
3649 size_int (lbitpos)),
3650 mask);
3652 lhs = build2_loc (loc, code, compare_type,
3653 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3654 return lhs;
3657 /* Subroutine for fold_truth_andor_1: decode a field reference.
3659 If EXP is a comparison reference, we return the innermost reference.
3661 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3662 set to the starting bit number.
3664 If the innermost field can be completely contained in a mode-sized
3665 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3667 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3668 otherwise it is not changed.
3670 *PUNSIGNEDP is set to the signedness of the field.
3672 *PMASK is set to the mask used. This is either contained in a
3673 BIT_AND_EXPR or derived from the width of the field.
3675 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3677 Return 0 if this is not a component reference or is one that we can't
3678 do anything with. */
3680 static tree
3681 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3682 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3683 int *punsignedp, int *pvolatilep,
3684 tree *pmask, tree *pand_mask)
3686 tree outer_type = 0;
3687 tree and_mask = 0;
3688 tree mask, inner, offset;
3689 tree unsigned_type;
3690 unsigned int precision;
3692 /* All the optimizations using this function assume integer fields.
3693 There are problems with FP fields since the type_for_size call
3694 below can fail for, e.g., XFmode. */
3695 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3696 return 0;
3698 /* We are interested in the bare arrangement of bits, so strip everything
3699 that doesn't affect the machine mode. However, record the type of the
3700 outermost expression if it may matter below. */
3701 if (CONVERT_EXPR_P (exp)
3702 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3703 outer_type = TREE_TYPE (exp);
3704 STRIP_NOPS (exp);
3706 if (TREE_CODE (exp) == BIT_AND_EXPR)
3708 and_mask = TREE_OPERAND (exp, 1);
3709 exp = TREE_OPERAND (exp, 0);
3710 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3711 if (TREE_CODE (and_mask) != INTEGER_CST)
3712 return 0;
3715 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3716 punsignedp, pvolatilep, false);
3717 if ((inner == exp && and_mask == 0)
3718 || *pbitsize < 0 || offset != 0
3719 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3720 return 0;
3722 /* If the number of bits in the reference is the same as the bitsize of
3723 the outer type, then the outer type gives the signedness. Otherwise
3724 (in case of a small bitfield) the signedness is unchanged. */
3725 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3726 *punsignedp = TYPE_UNSIGNED (outer_type);
3728 /* Compute the mask to access the bitfield. */
3729 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3730 precision = TYPE_PRECISION (unsigned_type);
3732 mask = build_int_cst_type (unsigned_type, -1);
3734 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3735 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3737 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3738 if (and_mask != 0)
3739 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3740 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3742 *pmask = mask;
3743 *pand_mask = and_mask;
3744 return inner;
3747 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3748 bit positions. */
3750 static int
3751 all_ones_mask_p (const_tree mask, int size)
3753 tree type = TREE_TYPE (mask);
3754 unsigned int precision = TYPE_PRECISION (type);
3755 tree tmask;
3757 tmask = build_int_cst_type (signed_type_for (type), -1);
3759 return
3760 tree_int_cst_equal (mask,
3761 const_binop (RSHIFT_EXPR,
3762 const_binop (LSHIFT_EXPR, tmask,
3763 size_int (precision - size)),
3764 size_int (precision - size)));
3767 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3768 represents the sign bit of EXP's type. If EXP represents a sign
3769 or zero extension, also test VAL against the unextended type.
3770 The return value is the (sub)expression whose sign bit is VAL,
3771 or NULL_TREE otherwise. */
3773 static tree
3774 sign_bit_p (tree exp, const_tree val)
3776 unsigned HOST_WIDE_INT mask_lo, lo;
3777 HOST_WIDE_INT mask_hi, hi;
3778 int width;
3779 tree t;
3781 /* Tree EXP must have an integral type. */
3782 t = TREE_TYPE (exp);
3783 if (! INTEGRAL_TYPE_P (t))
3784 return NULL_TREE;
3786 /* Tree VAL must be an integer constant. */
3787 if (TREE_CODE (val) != INTEGER_CST
3788 || TREE_OVERFLOW (val))
3789 return NULL_TREE;
3791 width = TYPE_PRECISION (t);
3792 if (width > HOST_BITS_PER_WIDE_INT)
3794 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3795 lo = 0;
3797 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3798 mask_lo = -1;
3800 else
3802 hi = 0;
3803 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3805 mask_hi = 0;
3806 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3809 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3810 treat VAL as if it were unsigned. */
3811 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3812 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3813 return exp;
3815 /* Handle extension from a narrower type. */
3816 if (TREE_CODE (exp) == NOP_EXPR
3817 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3818 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3820 return NULL_TREE;
3823 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3824 to be evaluated unconditionally. */
3826 static int
3827 simple_operand_p (const_tree exp)
3829 /* Strip any conversions that don't change the machine mode. */
3830 STRIP_NOPS (exp);
3832 return (CONSTANT_CLASS_P (exp)
3833 || TREE_CODE (exp) == SSA_NAME
3834 || (DECL_P (exp)
3835 && ! TREE_ADDRESSABLE (exp)
3836 && ! TREE_THIS_VOLATILE (exp)
3837 && ! DECL_NONLOCAL (exp)
3838 /* Don't regard global variables as simple. They may be
3839 allocated in ways unknown to the compiler (shared memory,
3840 #pragma weak, etc). */
3841 && ! TREE_PUBLIC (exp)
3842 && ! DECL_EXTERNAL (exp)
3843 /* Weakrefs are not safe to be read, since they can be NULL.
3844 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3845 have DECL_WEAK flag set. */
3846 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3847 /* Loading a static variable is unduly expensive, but global
3848 registers aren't expensive. */
3849 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3852 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3853 to be evaluated unconditionally.
3854 I addition to simple_operand_p, we assume that comparisons, conversions,
3855 and logic-not operations are simple, if their operands are simple, too. */
3857 static bool
3858 simple_operand_p_2 (tree exp)
3860 enum tree_code code;
3862 if (TREE_SIDE_EFFECTS (exp)
3863 || tree_could_trap_p (exp))
3864 return false;
3866 while (CONVERT_EXPR_P (exp))
3867 exp = TREE_OPERAND (exp, 0);
3869 code = TREE_CODE (exp);
3871 if (TREE_CODE_CLASS (code) == tcc_comparison)
3872 return (simple_operand_p (TREE_OPERAND (exp, 0))
3873 && simple_operand_p (TREE_OPERAND (exp, 1)));
3875 if (code == TRUTH_NOT_EXPR)
3876 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3878 return simple_operand_p (exp);
3882 /* The following functions are subroutines to fold_range_test and allow it to
3883 try to change a logical combination of comparisons into a range test.
3885 For example, both
3886 X == 2 || X == 3 || X == 4 || X == 5
3888 X >= 2 && X <= 5
3889 are converted to
3890 (unsigned) (X - 2) <= 3
3892 We describe each set of comparisons as being either inside or outside
3893 a range, using a variable named like IN_P, and then describe the
3894 range with a lower and upper bound. If one of the bounds is omitted,
3895 it represents either the highest or lowest value of the type.
3897 In the comments below, we represent a range by two numbers in brackets
3898 preceded by a "+" to designate being inside that range, or a "-" to
3899 designate being outside that range, so the condition can be inverted by
3900 flipping the prefix. An omitted bound is represented by a "-". For
3901 example, "- [-, 10]" means being outside the range starting at the lowest
3902 possible value and ending at 10, in other words, being greater than 10.
3903 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3904 always false.
3906 We set up things so that the missing bounds are handled in a consistent
3907 manner so neither a missing bound nor "true" and "false" need to be
3908 handled using a special case. */
3910 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3911 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3912 and UPPER1_P are nonzero if the respective argument is an upper bound
3913 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3914 must be specified for a comparison. ARG1 will be converted to ARG0's
3915 type if both are specified. */
3917 static tree
3918 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3919 tree arg1, int upper1_p)
3921 tree tem;
3922 int result;
3923 int sgn0, sgn1;
3925 /* If neither arg represents infinity, do the normal operation.
3926 Else, if not a comparison, return infinity. Else handle the special
3927 comparison rules. Note that most of the cases below won't occur, but
3928 are handled for consistency. */
3930 if (arg0 != 0 && arg1 != 0)
3932 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3933 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3934 STRIP_NOPS (tem);
3935 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3938 if (TREE_CODE_CLASS (code) != tcc_comparison)
3939 return 0;
3941 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3942 for neither. In real maths, we cannot assume open ended ranges are
3943 the same. But, this is computer arithmetic, where numbers are finite.
3944 We can therefore make the transformation of any unbounded range with
3945 the value Z, Z being greater than any representable number. This permits
3946 us to treat unbounded ranges as equal. */
3947 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3948 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3949 switch (code)
3951 case EQ_EXPR:
3952 result = sgn0 == sgn1;
3953 break;
3954 case NE_EXPR:
3955 result = sgn0 != sgn1;
3956 break;
3957 case LT_EXPR:
3958 result = sgn0 < sgn1;
3959 break;
3960 case LE_EXPR:
3961 result = sgn0 <= sgn1;
3962 break;
3963 case GT_EXPR:
3964 result = sgn0 > sgn1;
3965 break;
3966 case GE_EXPR:
3967 result = sgn0 >= sgn1;
3968 break;
3969 default:
3970 gcc_unreachable ();
3973 return constant_boolean_node (result, type);
3976 /* Helper routine for make_range. Perform one step for it, return
3977 new expression if the loop should continue or NULL_TREE if it should
3978 stop. */
3980 tree
3981 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3982 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3983 bool *strict_overflow_p)
3985 tree arg0_type = TREE_TYPE (arg0);
3986 tree n_low, n_high, low = *p_low, high = *p_high;
3987 int in_p = *p_in_p, n_in_p;
3989 switch (code)
3991 case TRUTH_NOT_EXPR:
3992 /* We can only do something if the range is testing for zero. */
3993 if (low == NULL_TREE || high == NULL_TREE
3994 || ! integer_zerop (low) || ! integer_zerop (high))
3995 return NULL_TREE;
3996 *p_in_p = ! in_p;
3997 return arg0;
3999 case EQ_EXPR: case NE_EXPR:
4000 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4001 /* We can only do something if the range is testing for zero
4002 and if the second operand is an integer constant. Note that
4003 saying something is "in" the range we make is done by
4004 complementing IN_P since it will set in the initial case of
4005 being not equal to zero; "out" is leaving it alone. */
4006 if (low == NULL_TREE || high == NULL_TREE
4007 || ! integer_zerop (low) || ! integer_zerop (high)
4008 || TREE_CODE (arg1) != INTEGER_CST)
4009 return NULL_TREE;
4011 switch (code)
4013 case NE_EXPR: /* - [c, c] */
4014 low = high = arg1;
4015 break;
4016 case EQ_EXPR: /* + [c, c] */
4017 in_p = ! in_p, low = high = arg1;
4018 break;
4019 case GT_EXPR: /* - [-, c] */
4020 low = 0, high = arg1;
4021 break;
4022 case GE_EXPR: /* + [c, -] */
4023 in_p = ! in_p, low = arg1, high = 0;
4024 break;
4025 case LT_EXPR: /* - [c, -] */
4026 low = arg1, high = 0;
4027 break;
4028 case LE_EXPR: /* + [-, c] */
4029 in_p = ! in_p, low = 0, high = arg1;
4030 break;
4031 default:
4032 gcc_unreachable ();
4035 /* If this is an unsigned comparison, we also know that EXP is
4036 greater than or equal to zero. We base the range tests we make
4037 on that fact, so we record it here so we can parse existing
4038 range tests. We test arg0_type since often the return type
4039 of, e.g. EQ_EXPR, is boolean. */
4040 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4042 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4043 in_p, low, high, 1,
4044 build_int_cst (arg0_type, 0),
4045 NULL_TREE))
4046 return NULL_TREE;
4048 in_p = n_in_p, low = n_low, high = n_high;
4050 /* If the high bound is missing, but we have a nonzero low
4051 bound, reverse the range so it goes from zero to the low bound
4052 minus 1. */
4053 if (high == 0 && low && ! integer_zerop (low))
4055 in_p = ! in_p;
4056 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4057 integer_one_node, 0);
4058 low = build_int_cst (arg0_type, 0);
4062 *p_low = low;
4063 *p_high = high;
4064 *p_in_p = in_p;
4065 return arg0;
4067 case NEGATE_EXPR:
4068 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4069 low and high are non-NULL, then normalize will DTRT. */
4070 if (!TYPE_UNSIGNED (arg0_type)
4071 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4073 if (low == NULL_TREE)
4074 low = TYPE_MIN_VALUE (arg0_type);
4075 if (high == NULL_TREE)
4076 high = TYPE_MAX_VALUE (arg0_type);
4079 /* (-x) IN [a,b] -> x in [-b, -a] */
4080 n_low = range_binop (MINUS_EXPR, exp_type,
4081 build_int_cst (exp_type, 0),
4082 0, high, 1);
4083 n_high = range_binop (MINUS_EXPR, exp_type,
4084 build_int_cst (exp_type, 0),
4085 0, low, 0);
4086 if (n_high != 0 && TREE_OVERFLOW (n_high))
4087 return NULL_TREE;
4088 goto normalize;
4090 case BIT_NOT_EXPR:
4091 /* ~ X -> -X - 1 */
4092 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4093 build_int_cst (exp_type, 1));
4095 case PLUS_EXPR:
4096 case MINUS_EXPR:
4097 if (TREE_CODE (arg1) != INTEGER_CST)
4098 return NULL_TREE;
4100 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4101 move a constant to the other side. */
4102 if (!TYPE_UNSIGNED (arg0_type)
4103 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4104 return NULL_TREE;
4106 /* If EXP is signed, any overflow in the computation is undefined,
4107 so we don't worry about it so long as our computations on
4108 the bounds don't overflow. For unsigned, overflow is defined
4109 and this is exactly the right thing. */
4110 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4111 arg0_type, low, 0, arg1, 0);
4112 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4113 arg0_type, high, 1, arg1, 0);
4114 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4115 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4116 return NULL_TREE;
4118 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4119 *strict_overflow_p = true;
4121 normalize:
4122 /* Check for an unsigned range which has wrapped around the maximum
4123 value thus making n_high < n_low, and normalize it. */
4124 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4126 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4127 integer_one_node, 0);
4128 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4129 integer_one_node, 0);
4131 /* If the range is of the form +/- [ x+1, x ], we won't
4132 be able to normalize it. But then, it represents the
4133 whole range or the empty set, so make it
4134 +/- [ -, - ]. */
4135 if (tree_int_cst_equal (n_low, low)
4136 && tree_int_cst_equal (n_high, high))
4137 low = high = 0;
4138 else
4139 in_p = ! in_p;
4141 else
4142 low = n_low, high = n_high;
4144 *p_low = low;
4145 *p_high = high;
4146 *p_in_p = in_p;
4147 return arg0;
4149 CASE_CONVERT:
4150 case NON_LVALUE_EXPR:
4151 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4152 return NULL_TREE;
4154 if (! INTEGRAL_TYPE_P (arg0_type)
4155 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4156 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4157 return NULL_TREE;
4159 n_low = low, n_high = high;
4161 if (n_low != 0)
4162 n_low = fold_convert_loc (loc, arg0_type, n_low);
4164 if (n_high != 0)
4165 n_high = fold_convert_loc (loc, arg0_type, n_high);
4167 /* If we're converting arg0 from an unsigned type, to exp,
4168 a signed type, we will be doing the comparison as unsigned.
4169 The tests above have already verified that LOW and HIGH
4170 are both positive.
4172 So we have to ensure that we will handle large unsigned
4173 values the same way that the current signed bounds treat
4174 negative values. */
4176 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4178 tree high_positive;
4179 tree equiv_type;
4180 /* For fixed-point modes, we need to pass the saturating flag
4181 as the 2nd parameter. */
4182 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4183 equiv_type
4184 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4185 TYPE_SATURATING (arg0_type));
4186 else
4187 equiv_type
4188 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4190 /* A range without an upper bound is, naturally, unbounded.
4191 Since convert would have cropped a very large value, use
4192 the max value for the destination type. */
4193 high_positive
4194 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4195 : TYPE_MAX_VALUE (arg0_type);
4197 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4198 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4199 fold_convert_loc (loc, arg0_type,
4200 high_positive),
4201 build_int_cst (arg0_type, 1));
4203 /* If the low bound is specified, "and" the range with the
4204 range for which the original unsigned value will be
4205 positive. */
4206 if (low != 0)
4208 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4209 1, fold_convert_loc (loc, arg0_type,
4210 integer_zero_node),
4211 high_positive))
4212 return NULL_TREE;
4214 in_p = (n_in_p == in_p);
4216 else
4218 /* Otherwise, "or" the range with the range of the input
4219 that will be interpreted as negative. */
4220 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4221 1, fold_convert_loc (loc, arg0_type,
4222 integer_zero_node),
4223 high_positive))
4224 return NULL_TREE;
4226 in_p = (in_p != n_in_p);
4230 *p_low = n_low;
4231 *p_high = n_high;
4232 *p_in_p = in_p;
4233 return arg0;
4235 default:
4236 return NULL_TREE;
4240 /* Given EXP, a logical expression, set the range it is testing into
4241 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4242 actually being tested. *PLOW and *PHIGH will be made of the same
4243 type as the returned expression. If EXP is not a comparison, we
4244 will most likely not be returning a useful value and range. Set
4245 *STRICT_OVERFLOW_P to true if the return value is only valid
4246 because signed overflow is undefined; otherwise, do not change
4247 *STRICT_OVERFLOW_P. */
4249 tree
4250 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4251 bool *strict_overflow_p)
4253 enum tree_code code;
4254 tree arg0, arg1 = NULL_TREE;
4255 tree exp_type, nexp;
4256 int in_p;
4257 tree low, high;
4258 location_t loc = EXPR_LOCATION (exp);
4260 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4261 and see if we can refine the range. Some of the cases below may not
4262 happen, but it doesn't seem worth worrying about this. We "continue"
4263 the outer loop when we've changed something; otherwise we "break"
4264 the switch, which will "break" the while. */
4266 in_p = 0;
4267 low = high = build_int_cst (TREE_TYPE (exp), 0);
4269 while (1)
4271 code = TREE_CODE (exp);
4272 exp_type = TREE_TYPE (exp);
4273 arg0 = NULL_TREE;
4275 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4277 if (TREE_OPERAND_LENGTH (exp) > 0)
4278 arg0 = TREE_OPERAND (exp, 0);
4279 if (TREE_CODE_CLASS (code) == tcc_binary
4280 || TREE_CODE_CLASS (code) == tcc_comparison
4281 || (TREE_CODE_CLASS (code) == tcc_expression
4282 && TREE_OPERAND_LENGTH (exp) > 1))
4283 arg1 = TREE_OPERAND (exp, 1);
4285 if (arg0 == NULL_TREE)
4286 break;
4288 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4289 &high, &in_p, strict_overflow_p);
4290 if (nexp == NULL_TREE)
4291 break;
4292 exp = nexp;
4295 /* If EXP is a constant, we can evaluate whether this is true or false. */
4296 if (TREE_CODE (exp) == INTEGER_CST)
4298 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4299 exp, 0, low, 0))
4300 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4301 exp, 1, high, 1)));
4302 low = high = 0;
4303 exp = 0;
4306 *pin_p = in_p, *plow = low, *phigh = high;
4307 return exp;
4310 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4311 type, TYPE, return an expression to test if EXP is in (or out of, depending
4312 on IN_P) the range. Return 0 if the test couldn't be created. */
4314 tree
4315 build_range_check (location_t loc, tree type, tree exp, int in_p,
4316 tree low, tree high)
4318 tree etype = TREE_TYPE (exp), value;
4320 #ifdef HAVE_canonicalize_funcptr_for_compare
4321 /* Disable this optimization for function pointer expressions
4322 on targets that require function pointer canonicalization. */
4323 if (HAVE_canonicalize_funcptr_for_compare
4324 && TREE_CODE (etype) == POINTER_TYPE
4325 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4326 return NULL_TREE;
4327 #endif
4329 if (! in_p)
4331 value = build_range_check (loc, type, exp, 1, low, high);
4332 if (value != 0)
4333 return invert_truthvalue_loc (loc, value);
4335 return 0;
4338 if (low == 0 && high == 0)
4339 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4341 if (low == 0)
4342 return fold_build2_loc (loc, LE_EXPR, type, exp,
4343 fold_convert_loc (loc, etype, high));
4345 if (high == 0)
4346 return fold_build2_loc (loc, GE_EXPR, type, exp,
4347 fold_convert_loc (loc, etype, low));
4349 if (operand_equal_p (low, high, 0))
4350 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4351 fold_convert_loc (loc, etype, low));
4353 if (integer_zerop (low))
4355 if (! TYPE_UNSIGNED (etype))
4357 etype = unsigned_type_for (etype);
4358 high = fold_convert_loc (loc, etype, high);
4359 exp = fold_convert_loc (loc, etype, exp);
4361 return build_range_check (loc, type, exp, 1, 0, high);
4364 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4365 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4367 unsigned HOST_WIDE_INT lo;
4368 HOST_WIDE_INT hi;
4369 int prec;
4371 prec = TYPE_PRECISION (etype);
4372 if (prec <= HOST_BITS_PER_WIDE_INT)
4374 hi = 0;
4375 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4377 else
4379 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4380 lo = HOST_WIDE_INT_M1U;
4383 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4385 if (TYPE_UNSIGNED (etype))
4387 tree signed_etype = signed_type_for (etype);
4388 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4389 etype
4390 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4391 else
4392 etype = signed_etype;
4393 exp = fold_convert_loc (loc, etype, exp);
4395 return fold_build2_loc (loc, GT_EXPR, type, exp,
4396 build_int_cst (etype, 0));
4400 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4401 This requires wrap-around arithmetics for the type of the expression.
4402 First make sure that arithmetics in this type is valid, then make sure
4403 that it wraps around. */
4404 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4405 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4406 TYPE_UNSIGNED (etype));
4408 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4410 tree utype, minv, maxv;
4412 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4413 for the type in question, as we rely on this here. */
4414 utype = unsigned_type_for (etype);
4415 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4416 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4417 integer_one_node, 1);
4418 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4420 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4421 minv, 1, maxv, 1)))
4422 etype = utype;
4423 else
4424 return 0;
4427 high = fold_convert_loc (loc, etype, high);
4428 low = fold_convert_loc (loc, etype, low);
4429 exp = fold_convert_loc (loc, etype, exp);
4431 value = const_binop (MINUS_EXPR, high, low);
4434 if (POINTER_TYPE_P (etype))
4436 if (value != 0 && !TREE_OVERFLOW (value))
4438 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4439 return build_range_check (loc, type,
4440 fold_build_pointer_plus_loc (loc, exp, low),
4441 1, build_int_cst (etype, 0), value);
4443 return 0;
4446 if (value != 0 && !TREE_OVERFLOW (value))
4447 return build_range_check (loc, type,
4448 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4449 1, build_int_cst (etype, 0), value);
4451 return 0;
4454 /* Return the predecessor of VAL in its type, handling the infinite case. */
4456 static tree
4457 range_predecessor (tree val)
4459 tree type = TREE_TYPE (val);
4461 if (INTEGRAL_TYPE_P (type)
4462 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4463 return 0;
4464 else
4465 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4468 /* Return the successor of VAL in its type, handling the infinite case. */
4470 static tree
4471 range_successor (tree val)
4473 tree type = TREE_TYPE (val);
4475 if (INTEGRAL_TYPE_P (type)
4476 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4477 return 0;
4478 else
4479 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4482 /* Given two ranges, see if we can merge them into one. Return 1 if we
4483 can, 0 if we can't. Set the output range into the specified parameters. */
4485 bool
4486 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4487 tree high0, int in1_p, tree low1, tree high1)
4489 int no_overlap;
4490 int subset;
4491 int temp;
4492 tree tem;
4493 int in_p;
4494 tree low, high;
4495 int lowequal = ((low0 == 0 && low1 == 0)
4496 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4497 low0, 0, low1, 0)));
4498 int highequal = ((high0 == 0 && high1 == 0)
4499 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4500 high0, 1, high1, 1)));
4502 /* Make range 0 be the range that starts first, or ends last if they
4503 start at the same value. Swap them if it isn't. */
4504 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4505 low0, 0, low1, 0))
4506 || (lowequal
4507 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4508 high1, 1, high0, 1))))
4510 temp = in0_p, in0_p = in1_p, in1_p = temp;
4511 tem = low0, low0 = low1, low1 = tem;
4512 tem = high0, high0 = high1, high1 = tem;
4515 /* Now flag two cases, whether the ranges are disjoint or whether the
4516 second range is totally subsumed in the first. Note that the tests
4517 below are simplified by the ones above. */
4518 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4519 high0, 1, low1, 0));
4520 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4521 high1, 1, high0, 1));
4523 /* We now have four cases, depending on whether we are including or
4524 excluding the two ranges. */
4525 if (in0_p && in1_p)
4527 /* If they don't overlap, the result is false. If the second range
4528 is a subset it is the result. Otherwise, the range is from the start
4529 of the second to the end of the first. */
4530 if (no_overlap)
4531 in_p = 0, low = high = 0;
4532 else if (subset)
4533 in_p = 1, low = low1, high = high1;
4534 else
4535 in_p = 1, low = low1, high = high0;
4538 else if (in0_p && ! in1_p)
4540 /* If they don't overlap, the result is the first range. If they are
4541 equal, the result is false. If the second range is a subset of the
4542 first, and the ranges begin at the same place, we go from just after
4543 the end of the second range to the end of the first. If the second
4544 range is not a subset of the first, or if it is a subset and both
4545 ranges end at the same place, the range starts at the start of the
4546 first range and ends just before the second range.
4547 Otherwise, we can't describe this as a single range. */
4548 if (no_overlap)
4549 in_p = 1, low = low0, high = high0;
4550 else if (lowequal && highequal)
4551 in_p = 0, low = high = 0;
4552 else if (subset && lowequal)
4554 low = range_successor (high1);
4555 high = high0;
4556 in_p = 1;
4557 if (low == 0)
4559 /* We are in the weird situation where high0 > high1 but
4560 high1 has no successor. Punt. */
4561 return 0;
4564 else if (! subset || highequal)
4566 low = low0;
4567 high = range_predecessor (low1);
4568 in_p = 1;
4569 if (high == 0)
4571 /* low0 < low1 but low1 has no predecessor. Punt. */
4572 return 0;
4575 else
4576 return 0;
4579 else if (! in0_p && in1_p)
4581 /* If they don't overlap, the result is the second range. If the second
4582 is a subset of the first, the result is false. Otherwise,
4583 the range starts just after the first range and ends at the
4584 end of the second. */
4585 if (no_overlap)
4586 in_p = 1, low = low1, high = high1;
4587 else if (subset || highequal)
4588 in_p = 0, low = high = 0;
4589 else
4591 low = range_successor (high0);
4592 high = high1;
4593 in_p = 1;
4594 if (low == 0)
4596 /* high1 > high0 but high0 has no successor. Punt. */
4597 return 0;
4602 else
4604 /* The case where we are excluding both ranges. Here the complex case
4605 is if they don't overlap. In that case, the only time we have a
4606 range is if they are adjacent. If the second is a subset of the
4607 first, the result is the first. Otherwise, the range to exclude
4608 starts at the beginning of the first range and ends at the end of the
4609 second. */
4610 if (no_overlap)
4612 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4613 range_successor (high0),
4614 1, low1, 0)))
4615 in_p = 0, low = low0, high = high1;
4616 else
4618 /* Canonicalize - [min, x] into - [-, x]. */
4619 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4620 switch (TREE_CODE (TREE_TYPE (low0)))
4622 case ENUMERAL_TYPE:
4623 if (TYPE_PRECISION (TREE_TYPE (low0))
4624 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4625 break;
4626 /* FALLTHROUGH */
4627 case INTEGER_TYPE:
4628 if (tree_int_cst_equal (low0,
4629 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4630 low0 = 0;
4631 break;
4632 case POINTER_TYPE:
4633 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4634 && integer_zerop (low0))
4635 low0 = 0;
4636 break;
4637 default:
4638 break;
4641 /* Canonicalize - [x, max] into - [x, -]. */
4642 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4643 switch (TREE_CODE (TREE_TYPE (high1)))
4645 case ENUMERAL_TYPE:
4646 if (TYPE_PRECISION (TREE_TYPE (high1))
4647 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4648 break;
4649 /* FALLTHROUGH */
4650 case INTEGER_TYPE:
4651 if (tree_int_cst_equal (high1,
4652 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4653 high1 = 0;
4654 break;
4655 case POINTER_TYPE:
4656 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4657 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4658 high1, 1,
4659 integer_one_node, 1)))
4660 high1 = 0;
4661 break;
4662 default:
4663 break;
4666 /* The ranges might be also adjacent between the maximum and
4667 minimum values of the given type. For
4668 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4669 return + [x + 1, y - 1]. */
4670 if (low0 == 0 && high1 == 0)
4672 low = range_successor (high0);
4673 high = range_predecessor (low1);
4674 if (low == 0 || high == 0)
4675 return 0;
4677 in_p = 1;
4679 else
4680 return 0;
4683 else if (subset)
4684 in_p = 0, low = low0, high = high0;
4685 else
4686 in_p = 0, low = low0, high = high1;
4689 *pin_p = in_p, *plow = low, *phigh = high;
4690 return 1;
4694 /* Subroutine of fold, looking inside expressions of the form
4695 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4696 of the COND_EXPR. This function is being used also to optimize
4697 A op B ? C : A, by reversing the comparison first.
4699 Return a folded expression whose code is not a COND_EXPR
4700 anymore, or NULL_TREE if no folding opportunity is found. */
4702 static tree
4703 fold_cond_expr_with_comparison (location_t loc, tree type,
4704 tree arg0, tree arg1, tree arg2)
4706 enum tree_code comp_code = TREE_CODE (arg0);
4707 tree arg00 = TREE_OPERAND (arg0, 0);
4708 tree arg01 = TREE_OPERAND (arg0, 1);
4709 tree arg1_type = TREE_TYPE (arg1);
4710 tree tem;
4712 STRIP_NOPS (arg1);
4713 STRIP_NOPS (arg2);
4715 /* If we have A op 0 ? A : -A, consider applying the following
4716 transformations:
4718 A == 0? A : -A same as -A
4719 A != 0? A : -A same as A
4720 A >= 0? A : -A same as abs (A)
4721 A > 0? A : -A same as abs (A)
4722 A <= 0? A : -A same as -abs (A)
4723 A < 0? A : -A same as -abs (A)
4725 None of these transformations work for modes with signed
4726 zeros. If A is +/-0, the first two transformations will
4727 change the sign of the result (from +0 to -0, or vice
4728 versa). The last four will fix the sign of the result,
4729 even though the original expressions could be positive or
4730 negative, depending on the sign of A.
4732 Note that all these transformations are correct if A is
4733 NaN, since the two alternatives (A and -A) are also NaNs. */
4734 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4735 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4736 ? real_zerop (arg01)
4737 : integer_zerop (arg01))
4738 && ((TREE_CODE (arg2) == NEGATE_EXPR
4739 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4740 /* In the case that A is of the form X-Y, '-A' (arg2) may
4741 have already been folded to Y-X, check for that. */
4742 || (TREE_CODE (arg1) == MINUS_EXPR
4743 && TREE_CODE (arg2) == MINUS_EXPR
4744 && operand_equal_p (TREE_OPERAND (arg1, 0),
4745 TREE_OPERAND (arg2, 1), 0)
4746 && operand_equal_p (TREE_OPERAND (arg1, 1),
4747 TREE_OPERAND (arg2, 0), 0))))
4748 switch (comp_code)
4750 case EQ_EXPR:
4751 case UNEQ_EXPR:
4752 tem = fold_convert_loc (loc, arg1_type, arg1);
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type,
4755 negate_expr (tem)));
4756 case NE_EXPR:
4757 case LTGT_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4759 case UNGE_EXPR:
4760 case UNGT_EXPR:
4761 if (flag_trapping_math)
4762 break;
4763 /* Fall through. */
4764 case GE_EXPR:
4765 case GT_EXPR:
4766 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4767 arg1 = fold_convert_loc (loc, signed_type_for
4768 (TREE_TYPE (arg1)), arg1);
4769 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4770 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4771 case UNLE_EXPR:
4772 case UNLT_EXPR:
4773 if (flag_trapping_math)
4774 break;
4775 case LE_EXPR:
4776 case LT_EXPR:
4777 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4778 arg1 = fold_convert_loc (loc, signed_type_for
4779 (TREE_TYPE (arg1)), arg1);
4780 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4781 return negate_expr (fold_convert_loc (loc, type, tem));
4782 default:
4783 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4784 break;
4787 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4788 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4789 both transformations are correct when A is NaN: A != 0
4790 is then true, and A == 0 is false. */
4792 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4793 && integer_zerop (arg01) && integer_zerop (arg2))
4795 if (comp_code == NE_EXPR)
4796 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4797 else if (comp_code == EQ_EXPR)
4798 return build_zero_cst (type);
4801 /* Try some transformations of A op B ? A : B.
4803 A == B? A : B same as B
4804 A != B? A : B same as A
4805 A >= B? A : B same as max (A, B)
4806 A > B? A : B same as max (B, A)
4807 A <= B? A : B same as min (A, B)
4808 A < B? A : B same as min (B, A)
4810 As above, these transformations don't work in the presence
4811 of signed zeros. For example, if A and B are zeros of
4812 opposite sign, the first two transformations will change
4813 the sign of the result. In the last four, the original
4814 expressions give different results for (A=+0, B=-0) and
4815 (A=-0, B=+0), but the transformed expressions do not.
4817 The first two transformations are correct if either A or B
4818 is a NaN. In the first transformation, the condition will
4819 be false, and B will indeed be chosen. In the case of the
4820 second transformation, the condition A != B will be true,
4821 and A will be chosen.
4823 The conversions to max() and min() are not correct if B is
4824 a number and A is not. The conditions in the original
4825 expressions will be false, so all four give B. The min()
4826 and max() versions would give a NaN instead. */
4827 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4828 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4829 /* Avoid these transformations if the COND_EXPR may be used
4830 as an lvalue in the C++ front-end. PR c++/19199. */
4831 && (in_gimple_form
4832 || VECTOR_TYPE_P (type)
4833 || (strcmp (lang_hooks.name, "GNU C++") != 0
4834 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4835 || ! maybe_lvalue_p (arg1)
4836 || ! maybe_lvalue_p (arg2)))
4838 tree comp_op0 = arg00;
4839 tree comp_op1 = arg01;
4840 tree comp_type = TREE_TYPE (comp_op0);
4842 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4843 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4845 comp_type = type;
4846 comp_op0 = arg1;
4847 comp_op1 = arg2;
4850 switch (comp_code)
4852 case EQ_EXPR:
4853 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4854 case NE_EXPR:
4855 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4856 case LE_EXPR:
4857 case LT_EXPR:
4858 case UNLE_EXPR:
4859 case UNLT_EXPR:
4860 /* In C++ a ?: expression can be an lvalue, so put the
4861 operand which will be used if they are equal first
4862 so that we can convert this back to the
4863 corresponding COND_EXPR. */
4864 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4866 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4867 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4868 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4869 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4870 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4871 comp_op1, comp_op0);
4872 return pedantic_non_lvalue_loc (loc,
4873 fold_convert_loc (loc, type, tem));
4875 break;
4876 case GE_EXPR:
4877 case GT_EXPR:
4878 case UNGE_EXPR:
4879 case UNGT_EXPR:
4880 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4882 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4883 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4884 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4885 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4886 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4887 comp_op1, comp_op0);
4888 return pedantic_non_lvalue_loc (loc,
4889 fold_convert_loc (loc, type, tem));
4891 break;
4892 case UNEQ_EXPR:
4893 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4894 return pedantic_non_lvalue_loc (loc,
4895 fold_convert_loc (loc, type, arg2));
4896 break;
4897 case LTGT_EXPR:
4898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4899 return pedantic_non_lvalue_loc (loc,
4900 fold_convert_loc (loc, type, arg1));
4901 break;
4902 default:
4903 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4904 break;
4908 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4909 we might still be able to simplify this. For example,
4910 if C1 is one less or one more than C2, this might have started
4911 out as a MIN or MAX and been transformed by this function.
4912 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4914 if (INTEGRAL_TYPE_P (type)
4915 && TREE_CODE (arg01) == INTEGER_CST
4916 && TREE_CODE (arg2) == INTEGER_CST)
4917 switch (comp_code)
4919 case EQ_EXPR:
4920 if (TREE_CODE (arg1) == INTEGER_CST)
4921 break;
4922 /* We can replace A with C1 in this case. */
4923 arg1 = fold_convert_loc (loc, type, arg01);
4924 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4926 case LT_EXPR:
4927 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4928 MIN_EXPR, to preserve the signedness of the comparison. */
4929 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (PLUS_EXPR, arg2,
4933 build_int_cst (type, 1)),
4934 OEP_ONLY_CONST))
4936 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4937 fold_convert_loc (loc, TREE_TYPE (arg00),
4938 arg2));
4939 return pedantic_non_lvalue_loc (loc,
4940 fold_convert_loc (loc, type, tem));
4942 break;
4944 case LE_EXPR:
4945 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4946 as above. */
4947 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4948 OEP_ONLY_CONST)
4949 && operand_equal_p (arg01,
4950 const_binop (MINUS_EXPR, arg2,
4951 build_int_cst (type, 1)),
4952 OEP_ONLY_CONST))
4954 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4955 fold_convert_loc (loc, TREE_TYPE (arg00),
4956 arg2));
4957 return pedantic_non_lvalue_loc (loc,
4958 fold_convert_loc (loc, type, tem));
4960 break;
4962 case GT_EXPR:
4963 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4964 MAX_EXPR, to preserve the signedness of the comparison. */
4965 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4966 OEP_ONLY_CONST)
4967 && operand_equal_p (arg01,
4968 const_binop (MINUS_EXPR, arg2,
4969 build_int_cst (type, 1)),
4970 OEP_ONLY_CONST))
4972 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4973 fold_convert_loc (loc, TREE_TYPE (arg00),
4974 arg2));
4975 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4977 break;
4979 case GE_EXPR:
4980 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4981 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4982 OEP_ONLY_CONST)
4983 && operand_equal_p (arg01,
4984 const_binop (PLUS_EXPR, arg2,
4985 build_int_cst (type, 1)),
4986 OEP_ONLY_CONST))
4988 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4989 fold_convert_loc (loc, TREE_TYPE (arg00),
4990 arg2));
4991 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4993 break;
4994 case NE_EXPR:
4995 break;
4996 default:
4997 gcc_unreachable ();
5000 return NULL_TREE;
5005 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5006 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5007 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5008 false) >= 2)
5009 #endif
5011 /* EXP is some logical combination of boolean tests. See if we can
5012 merge it into some range test. Return the new tree if so. */
5014 static tree
5015 fold_range_test (location_t loc, enum tree_code code, tree type,
5016 tree op0, tree op1)
5018 int or_op = (code == TRUTH_ORIF_EXPR
5019 || code == TRUTH_OR_EXPR);
5020 int in0_p, in1_p, in_p;
5021 tree low0, low1, low, high0, high1, high;
5022 bool strict_overflow_p = false;
5023 tree tem, lhs, rhs;
5024 const char * const warnmsg = G_("assuming signed overflow does not occur "
5025 "when simplifying range test");
5027 if (!INTEGRAL_TYPE_P (type))
5028 return 0;
5030 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5031 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5033 /* If this is an OR operation, invert both sides; we will invert
5034 again at the end. */
5035 if (or_op)
5036 in0_p = ! in0_p, in1_p = ! in1_p;
5038 /* If both expressions are the same, if we can merge the ranges, and we
5039 can build the range test, return it or it inverted. If one of the
5040 ranges is always true or always false, consider it to be the same
5041 expression as the other. */
5042 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5043 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5044 in1_p, low1, high1)
5045 && 0 != (tem = (build_range_check (loc, type,
5046 lhs != 0 ? lhs
5047 : rhs != 0 ? rhs : integer_zero_node,
5048 in_p, low, high))))
5050 if (strict_overflow_p)
5051 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5052 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5055 /* On machines where the branch cost is expensive, if this is a
5056 short-circuited branch and the underlying object on both sides
5057 is the same, make a non-short-circuit operation. */
5058 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5059 && lhs != 0 && rhs != 0
5060 && (code == TRUTH_ANDIF_EXPR
5061 || code == TRUTH_ORIF_EXPR)
5062 && operand_equal_p (lhs, rhs, 0))
5064 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5065 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5066 which cases we can't do this. */
5067 if (simple_operand_p (lhs))
5068 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5069 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5070 type, op0, op1);
5072 else if (!lang_hooks.decls.global_bindings_p ()
5073 && !CONTAINS_PLACEHOLDER_P (lhs))
5075 tree common = save_expr (lhs);
5077 if (0 != (lhs = build_range_check (loc, type, common,
5078 or_op ? ! in0_p : in0_p,
5079 low0, high0))
5080 && (0 != (rhs = build_range_check (loc, type, common,
5081 or_op ? ! in1_p : in1_p,
5082 low1, high1))))
5084 if (strict_overflow_p)
5085 fold_overflow_warning (warnmsg,
5086 WARN_STRICT_OVERFLOW_COMPARISON);
5087 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5088 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5089 type, lhs, rhs);
5094 return 0;
5097 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5098 bit value. Arrange things so the extra bits will be set to zero if and
5099 only if C is signed-extended to its full width. If MASK is nonzero,
5100 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5102 static tree
5103 unextend (tree c, int p, int unsignedp, tree mask)
5105 tree type = TREE_TYPE (c);
5106 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5107 tree temp;
5109 if (p == modesize || unsignedp)
5110 return c;
5112 /* We work by getting just the sign bit into the low-order bit, then
5113 into the high-order bit, then sign-extend. We then XOR that value
5114 with C. */
5115 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5116 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5118 /* We must use a signed type in order to get an arithmetic right shift.
5119 However, we must also avoid introducing accidental overflows, so that
5120 a subsequent call to integer_zerop will work. Hence we must
5121 do the type conversion here. At this point, the constant is either
5122 zero or one, and the conversion to a signed type can never overflow.
5123 We could get an overflow if this conversion is done anywhere else. */
5124 if (TYPE_UNSIGNED (type))
5125 temp = fold_convert (signed_type_for (type), temp);
5127 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5128 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5129 if (mask != 0)
5130 temp = const_binop (BIT_AND_EXPR, temp,
5131 fold_convert (TREE_TYPE (c), mask));
5132 /* If necessary, convert the type back to match the type of C. */
5133 if (TYPE_UNSIGNED (type))
5134 temp = fold_convert (type, temp);
5136 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5139 /* For an expression that has the form
5140 (A && B) || ~B
5142 (A || B) && ~B,
5143 we can drop one of the inner expressions and simplify to
5144 A || ~B
5146 A && ~B
5147 LOC is the location of the resulting expression. OP is the inner
5148 logical operation; the left-hand side in the examples above, while CMPOP
5149 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5150 removing a condition that guards another, as in
5151 (A != NULL && A->...) || A == NULL
5152 which we must not transform. If RHS_ONLY is true, only eliminate the
5153 right-most operand of the inner logical operation. */
5155 static tree
5156 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5157 bool rhs_only)
5159 tree type = TREE_TYPE (cmpop);
5160 enum tree_code code = TREE_CODE (cmpop);
5161 enum tree_code truthop_code = TREE_CODE (op);
5162 tree lhs = TREE_OPERAND (op, 0);
5163 tree rhs = TREE_OPERAND (op, 1);
5164 tree orig_lhs = lhs, orig_rhs = rhs;
5165 enum tree_code rhs_code = TREE_CODE (rhs);
5166 enum tree_code lhs_code = TREE_CODE (lhs);
5167 enum tree_code inv_code;
5169 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5170 return NULL_TREE;
5172 if (TREE_CODE_CLASS (code) != tcc_comparison)
5173 return NULL_TREE;
5175 if (rhs_code == truthop_code)
5177 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5178 if (newrhs != NULL_TREE)
5180 rhs = newrhs;
5181 rhs_code = TREE_CODE (rhs);
5184 if (lhs_code == truthop_code && !rhs_only)
5186 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5187 if (newlhs != NULL_TREE)
5189 lhs = newlhs;
5190 lhs_code = TREE_CODE (lhs);
5194 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5195 if (inv_code == rhs_code
5196 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5197 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5198 return lhs;
5199 if (!rhs_only && inv_code == lhs_code
5200 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5201 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5202 return rhs;
5203 if (rhs != orig_rhs || lhs != orig_lhs)
5204 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5205 lhs, rhs);
5206 return NULL_TREE;
5209 /* Find ways of folding logical expressions of LHS and RHS:
5210 Try to merge two comparisons to the same innermost item.
5211 Look for range tests like "ch >= '0' && ch <= '9'".
5212 Look for combinations of simple terms on machines with expensive branches
5213 and evaluate the RHS unconditionally.
5215 For example, if we have p->a == 2 && p->b == 4 and we can make an
5216 object large enough to span both A and B, we can do this with a comparison
5217 against the object ANDed with the a mask.
5219 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5220 operations to do this with one comparison.
5222 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5223 function and the one above.
5225 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5226 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5228 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5229 two operands.
5231 We return the simplified tree or 0 if no optimization is possible. */
5233 static tree
5234 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5235 tree lhs, tree rhs)
5237 /* If this is the "or" of two comparisons, we can do something if
5238 the comparisons are NE_EXPR. If this is the "and", we can do something
5239 if the comparisons are EQ_EXPR. I.e.,
5240 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5242 WANTED_CODE is this operation code. For single bit fields, we can
5243 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5244 comparison for one-bit fields. */
5246 enum tree_code wanted_code;
5247 enum tree_code lcode, rcode;
5248 tree ll_arg, lr_arg, rl_arg, rr_arg;
5249 tree ll_inner, lr_inner, rl_inner, rr_inner;
5250 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5251 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5252 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5253 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5254 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5255 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5256 enum machine_mode lnmode, rnmode;
5257 tree ll_mask, lr_mask, rl_mask, rr_mask;
5258 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5259 tree l_const, r_const;
5260 tree lntype, rntype, result;
5261 HOST_WIDE_INT first_bit, end_bit;
5262 int volatilep;
5264 /* Start by getting the comparison codes. Fail if anything is volatile.
5265 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5266 it were surrounded with a NE_EXPR. */
5268 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5269 return 0;
5271 lcode = TREE_CODE (lhs);
5272 rcode = TREE_CODE (rhs);
5274 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5276 lhs = build2 (NE_EXPR, truth_type, lhs,
5277 build_int_cst (TREE_TYPE (lhs), 0));
5278 lcode = NE_EXPR;
5281 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5283 rhs = build2 (NE_EXPR, truth_type, rhs,
5284 build_int_cst (TREE_TYPE (rhs), 0));
5285 rcode = NE_EXPR;
5288 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5289 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5290 return 0;
5292 ll_arg = TREE_OPERAND (lhs, 0);
5293 lr_arg = TREE_OPERAND (lhs, 1);
5294 rl_arg = TREE_OPERAND (rhs, 0);
5295 rr_arg = TREE_OPERAND (rhs, 1);
5297 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5298 if (simple_operand_p (ll_arg)
5299 && simple_operand_p (lr_arg))
5301 if (operand_equal_p (ll_arg, rl_arg, 0)
5302 && operand_equal_p (lr_arg, rr_arg, 0))
5304 result = combine_comparisons (loc, code, lcode, rcode,
5305 truth_type, ll_arg, lr_arg);
5306 if (result)
5307 return result;
5309 else if (operand_equal_p (ll_arg, rr_arg, 0)
5310 && operand_equal_p (lr_arg, rl_arg, 0))
5312 result = combine_comparisons (loc, code, lcode,
5313 swap_tree_comparison (rcode),
5314 truth_type, ll_arg, lr_arg);
5315 if (result)
5316 return result;
5320 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5321 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5323 /* If the RHS can be evaluated unconditionally and its operands are
5324 simple, it wins to evaluate the RHS unconditionally on machines
5325 with expensive branches. In this case, this isn't a comparison
5326 that can be merged. */
5328 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5329 false) >= 2
5330 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5331 && simple_operand_p (rl_arg)
5332 && simple_operand_p (rr_arg))
5334 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5335 if (code == TRUTH_OR_EXPR
5336 && lcode == NE_EXPR && integer_zerop (lr_arg)
5337 && rcode == NE_EXPR && integer_zerop (rr_arg)
5338 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5339 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5340 return build2_loc (loc, NE_EXPR, truth_type,
5341 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5342 ll_arg, rl_arg),
5343 build_int_cst (TREE_TYPE (ll_arg), 0));
5345 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5346 if (code == TRUTH_AND_EXPR
5347 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5348 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5349 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5350 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5351 return build2_loc (loc, EQ_EXPR, truth_type,
5352 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5353 ll_arg, rl_arg),
5354 build_int_cst (TREE_TYPE (ll_arg), 0));
5357 /* See if the comparisons can be merged. Then get all the parameters for
5358 each side. */
5360 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5361 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5362 return 0;
5364 volatilep = 0;
5365 ll_inner = decode_field_reference (loc, ll_arg,
5366 &ll_bitsize, &ll_bitpos, &ll_mode,
5367 &ll_unsignedp, &volatilep, &ll_mask,
5368 &ll_and_mask);
5369 lr_inner = decode_field_reference (loc, lr_arg,
5370 &lr_bitsize, &lr_bitpos, &lr_mode,
5371 &lr_unsignedp, &volatilep, &lr_mask,
5372 &lr_and_mask);
5373 rl_inner = decode_field_reference (loc, rl_arg,
5374 &rl_bitsize, &rl_bitpos, &rl_mode,
5375 &rl_unsignedp, &volatilep, &rl_mask,
5376 &rl_and_mask);
5377 rr_inner = decode_field_reference (loc, rr_arg,
5378 &rr_bitsize, &rr_bitpos, &rr_mode,
5379 &rr_unsignedp, &volatilep, &rr_mask,
5380 &rr_and_mask);
5382 /* It must be true that the inner operation on the lhs of each
5383 comparison must be the same if we are to be able to do anything.
5384 Then see if we have constants. If not, the same must be true for
5385 the rhs's. */
5386 if (volatilep || ll_inner == 0 || rl_inner == 0
5387 || ! operand_equal_p (ll_inner, rl_inner, 0))
5388 return 0;
5390 if (TREE_CODE (lr_arg) == INTEGER_CST
5391 && TREE_CODE (rr_arg) == INTEGER_CST)
5392 l_const = lr_arg, r_const = rr_arg;
5393 else if (lr_inner == 0 || rr_inner == 0
5394 || ! operand_equal_p (lr_inner, rr_inner, 0))
5395 return 0;
5396 else
5397 l_const = r_const = 0;
5399 /* If either comparison code is not correct for our logical operation,
5400 fail. However, we can convert a one-bit comparison against zero into
5401 the opposite comparison against that bit being set in the field. */
5403 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5404 if (lcode != wanted_code)
5406 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5408 /* Make the left operand unsigned, since we are only interested
5409 in the value of one bit. Otherwise we are doing the wrong
5410 thing below. */
5411 ll_unsignedp = 1;
5412 l_const = ll_mask;
5414 else
5415 return 0;
5418 /* This is analogous to the code for l_const above. */
5419 if (rcode != wanted_code)
5421 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5423 rl_unsignedp = 1;
5424 r_const = rl_mask;
5426 else
5427 return 0;
5430 /* See if we can find a mode that contains both fields being compared on
5431 the left. If we can't, fail. Otherwise, update all constants and masks
5432 to be relative to a field of that size. */
5433 first_bit = MIN (ll_bitpos, rl_bitpos);
5434 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5435 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5436 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5437 volatilep);
5438 if (lnmode == VOIDmode)
5439 return 0;
5441 lnbitsize = GET_MODE_BITSIZE (lnmode);
5442 lnbitpos = first_bit & ~ (lnbitsize - 1);
5443 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5444 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5446 if (BYTES_BIG_ENDIAN)
5448 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5449 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5452 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5453 size_int (xll_bitpos));
5454 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5455 size_int (xrl_bitpos));
5457 if (l_const)
5459 l_const = fold_convert_loc (loc, lntype, l_const);
5460 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5461 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5462 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5463 fold_build1_loc (loc, BIT_NOT_EXPR,
5464 lntype, ll_mask))))
5466 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5468 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5471 if (r_const)
5473 r_const = fold_convert_loc (loc, lntype, r_const);
5474 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5475 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5476 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5477 fold_build1_loc (loc, BIT_NOT_EXPR,
5478 lntype, rl_mask))))
5480 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5482 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5486 /* If the right sides are not constant, do the same for it. Also,
5487 disallow this optimization if a size or signedness mismatch occurs
5488 between the left and right sides. */
5489 if (l_const == 0)
5491 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5492 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5493 /* Make sure the two fields on the right
5494 correspond to the left without being swapped. */
5495 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5496 return 0;
5498 first_bit = MIN (lr_bitpos, rr_bitpos);
5499 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5500 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5501 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5502 volatilep);
5503 if (rnmode == VOIDmode)
5504 return 0;
5506 rnbitsize = GET_MODE_BITSIZE (rnmode);
5507 rnbitpos = first_bit & ~ (rnbitsize - 1);
5508 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5509 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5511 if (BYTES_BIG_ENDIAN)
5513 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5514 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5517 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5518 rntype, lr_mask),
5519 size_int (xlr_bitpos));
5520 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5521 rntype, rr_mask),
5522 size_int (xrr_bitpos));
5524 /* Make a mask that corresponds to both fields being compared.
5525 Do this for both items being compared. If the operands are the
5526 same size and the bits being compared are in the same position
5527 then we can do this by masking both and comparing the masked
5528 results. */
5529 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5530 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5531 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5533 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5534 ll_unsignedp || rl_unsignedp);
5535 if (! all_ones_mask_p (ll_mask, lnbitsize))
5536 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5538 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5539 lr_unsignedp || rr_unsignedp);
5540 if (! all_ones_mask_p (lr_mask, rnbitsize))
5541 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5543 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5546 /* There is still another way we can do something: If both pairs of
5547 fields being compared are adjacent, we may be able to make a wider
5548 field containing them both.
5550 Note that we still must mask the lhs/rhs expressions. Furthermore,
5551 the mask must be shifted to account for the shift done by
5552 make_bit_field_ref. */
5553 if ((ll_bitsize + ll_bitpos == rl_bitpos
5554 && lr_bitsize + lr_bitpos == rr_bitpos)
5555 || (ll_bitpos == rl_bitpos + rl_bitsize
5556 && lr_bitpos == rr_bitpos + rr_bitsize))
5558 tree type;
5560 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5561 ll_bitsize + rl_bitsize,
5562 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5563 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5564 lr_bitsize + rr_bitsize,
5565 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5567 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5568 size_int (MIN (xll_bitpos, xrl_bitpos)));
5569 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5570 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5572 /* Convert to the smaller type before masking out unwanted bits. */
5573 type = lntype;
5574 if (lntype != rntype)
5576 if (lnbitsize > rnbitsize)
5578 lhs = fold_convert_loc (loc, rntype, lhs);
5579 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5580 type = rntype;
5582 else if (lnbitsize < rnbitsize)
5584 rhs = fold_convert_loc (loc, lntype, rhs);
5585 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5586 type = lntype;
5590 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5591 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5593 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5594 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5596 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5599 return 0;
5602 /* Handle the case of comparisons with constants. If there is something in
5603 common between the masks, those bits of the constants must be the same.
5604 If not, the condition is always false. Test for this to avoid generating
5605 incorrect code below. */
5606 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5607 if (! integer_zerop (result)
5608 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5609 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5611 if (wanted_code == NE_EXPR)
5613 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5614 return constant_boolean_node (true, truth_type);
5616 else
5618 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5619 return constant_boolean_node (false, truth_type);
5623 /* Construct the expression we will return. First get the component
5624 reference we will make. Unless the mask is all ones the width of
5625 that field, perform the mask operation. Then compare with the
5626 merged constant. */
5627 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5628 ll_unsignedp || rl_unsignedp);
5630 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5631 if (! all_ones_mask_p (ll_mask, lnbitsize))
5632 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5634 return build2_loc (loc, wanted_code, truth_type, result,
5635 const_binop (BIT_IOR_EXPR, l_const, r_const));
5638 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5639 constant. */
5641 static tree
5642 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5643 tree op0, tree op1)
5645 tree arg0 = op0;
5646 enum tree_code op_code;
5647 tree comp_const;
5648 tree minmax_const;
5649 int consts_equal, consts_lt;
5650 tree inner;
5652 STRIP_SIGN_NOPS (arg0);
5654 op_code = TREE_CODE (arg0);
5655 minmax_const = TREE_OPERAND (arg0, 1);
5656 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5657 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5658 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5659 inner = TREE_OPERAND (arg0, 0);
5661 /* If something does not permit us to optimize, return the original tree. */
5662 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5663 || TREE_CODE (comp_const) != INTEGER_CST
5664 || TREE_OVERFLOW (comp_const)
5665 || TREE_CODE (minmax_const) != INTEGER_CST
5666 || TREE_OVERFLOW (minmax_const))
5667 return NULL_TREE;
5669 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5670 and GT_EXPR, doing the rest with recursive calls using logical
5671 simplifications. */
5672 switch (code)
5674 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5676 tree tem
5677 = optimize_minmax_comparison (loc,
5678 invert_tree_comparison (code, false),
5679 type, op0, op1);
5680 if (tem)
5681 return invert_truthvalue_loc (loc, tem);
5682 return NULL_TREE;
5685 case GE_EXPR:
5686 return
5687 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5688 optimize_minmax_comparison
5689 (loc, EQ_EXPR, type, arg0, comp_const),
5690 optimize_minmax_comparison
5691 (loc, GT_EXPR, type, arg0, comp_const));
5693 case EQ_EXPR:
5694 if (op_code == MAX_EXPR && consts_equal)
5695 /* MAX (X, 0) == 0 -> X <= 0 */
5696 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5698 else if (op_code == MAX_EXPR && consts_lt)
5699 /* MAX (X, 0) == 5 -> X == 5 */
5700 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5702 else if (op_code == MAX_EXPR)
5703 /* MAX (X, 0) == -1 -> false */
5704 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5706 else if (consts_equal)
5707 /* MIN (X, 0) == 0 -> X >= 0 */
5708 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5710 else if (consts_lt)
5711 /* MIN (X, 0) == 5 -> false */
5712 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5714 else
5715 /* MIN (X, 0) == -1 -> X == -1 */
5716 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5718 case GT_EXPR:
5719 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5720 /* MAX (X, 0) > 0 -> X > 0
5721 MAX (X, 0) > 5 -> X > 5 */
5722 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5724 else if (op_code == MAX_EXPR)
5725 /* MAX (X, 0) > -1 -> true */
5726 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5728 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5729 /* MIN (X, 0) > 0 -> false
5730 MIN (X, 0) > 5 -> false */
5731 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5733 else
5734 /* MIN (X, 0) > -1 -> X > -1 */
5735 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5737 default:
5738 return NULL_TREE;
5742 /* T is an integer expression that is being multiplied, divided, or taken a
5743 modulus (CODE says which and what kind of divide or modulus) by a
5744 constant C. See if we can eliminate that operation by folding it with
5745 other operations already in T. WIDE_TYPE, if non-null, is a type that
5746 should be used for the computation if wider than our type.
5748 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5749 (X * 2) + (Y * 4). We must, however, be assured that either the original
5750 expression would not overflow or that overflow is undefined for the type
5751 in the language in question.
5753 If we return a non-null expression, it is an equivalent form of the
5754 original computation, but need not be in the original type.
5756 We set *STRICT_OVERFLOW_P to true if the return values depends on
5757 signed overflow being undefined. Otherwise we do not change
5758 *STRICT_OVERFLOW_P. */
5760 static tree
5761 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5762 bool *strict_overflow_p)
5764 /* To avoid exponential search depth, refuse to allow recursion past
5765 three levels. Beyond that (1) it's highly unlikely that we'll find
5766 something interesting and (2) we've probably processed it before
5767 when we built the inner expression. */
5769 static int depth;
5770 tree ret;
5772 if (depth > 3)
5773 return NULL;
5775 depth++;
5776 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5777 depth--;
5779 return ret;
5782 static tree
5783 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5784 bool *strict_overflow_p)
5786 tree type = TREE_TYPE (t);
5787 enum tree_code tcode = TREE_CODE (t);
5788 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5789 > GET_MODE_SIZE (TYPE_MODE (type)))
5790 ? wide_type : type);
5791 tree t1, t2;
5792 int same_p = tcode == code;
5793 tree op0 = NULL_TREE, op1 = NULL_TREE;
5794 bool sub_strict_overflow_p;
5796 /* Don't deal with constants of zero here; they confuse the code below. */
5797 if (integer_zerop (c))
5798 return NULL_TREE;
5800 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5801 op0 = TREE_OPERAND (t, 0);
5803 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5804 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5806 /* Note that we need not handle conditional operations here since fold
5807 already handles those cases. So just do arithmetic here. */
5808 switch (tcode)
5810 case INTEGER_CST:
5811 /* For a constant, we can always simplify if we are a multiply
5812 or (for divide and modulus) if it is a multiple of our constant. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5815 return const_binop (code, fold_convert (ctype, t),
5816 fold_convert (ctype, c));
5817 break;
5819 CASE_CONVERT: case NON_LVALUE_EXPR:
5820 /* If op0 is an expression ... */
5821 if ((COMPARISON_CLASS_P (op0)
5822 || UNARY_CLASS_P (op0)
5823 || BINARY_CLASS_P (op0)
5824 || VL_EXP_CLASS_P (op0)
5825 || EXPRESSION_CLASS_P (op0))
5826 /* ... and has wrapping overflow, and its type is smaller
5827 than ctype, then we cannot pass through as widening. */
5828 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5829 && (TYPE_PRECISION (ctype)
5830 > TYPE_PRECISION (TREE_TYPE (op0))))
5831 /* ... or this is a truncation (t is narrower than op0),
5832 then we cannot pass through this narrowing. */
5833 || (TYPE_PRECISION (type)
5834 < TYPE_PRECISION (TREE_TYPE (op0)))
5835 /* ... or signedness changes for division or modulus,
5836 then we cannot pass through this conversion. */
5837 || (code != MULT_EXPR
5838 && (TYPE_UNSIGNED (ctype)
5839 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5840 /* ... or has undefined overflow while the converted to
5841 type has not, we cannot do the operation in the inner type
5842 as that would introduce undefined overflow. */
5843 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5844 && !TYPE_OVERFLOW_UNDEFINED (type))))
5845 break;
5847 /* Pass the constant down and see if we can make a simplification. If
5848 we can, replace this expression with the inner simplification for
5849 possible later conversion to our or some other type. */
5850 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5851 && TREE_CODE (t2) == INTEGER_CST
5852 && !TREE_OVERFLOW (t2)
5853 && (0 != (t1 = extract_muldiv (op0, t2, code,
5854 code == MULT_EXPR
5855 ? ctype : NULL_TREE,
5856 strict_overflow_p))))
5857 return t1;
5858 break;
5860 case ABS_EXPR:
5861 /* If widening the type changes it from signed to unsigned, then we
5862 must avoid building ABS_EXPR itself as unsigned. */
5863 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5865 tree cstype = (*signed_type_for) (ctype);
5866 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5867 != 0)
5869 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5870 return fold_convert (ctype, t1);
5872 break;
5874 /* If the constant is negative, we cannot simplify this. */
5875 if (tree_int_cst_sgn (c) == -1)
5876 break;
5877 /* FALLTHROUGH */
5878 case NEGATE_EXPR:
5879 /* For division and modulus, type can't be unsigned, as e.g.
5880 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5881 For signed types, even with wrapping overflow, this is fine. */
5882 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5883 break;
5884 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5885 != 0)
5886 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5887 break;
5889 case MIN_EXPR: case MAX_EXPR:
5890 /* If widening the type changes the signedness, then we can't perform
5891 this optimization as that changes the result. */
5892 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5893 break;
5895 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5896 sub_strict_overflow_p = false;
5897 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5898 &sub_strict_overflow_p)) != 0
5899 && (t2 = extract_muldiv (op1, c, code, wide_type,
5900 &sub_strict_overflow_p)) != 0)
5902 if (tree_int_cst_sgn (c) < 0)
5903 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5904 if (sub_strict_overflow_p)
5905 *strict_overflow_p = true;
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5907 fold_convert (ctype, t2));
5909 break;
5911 case LSHIFT_EXPR: case RSHIFT_EXPR:
5912 /* If the second operand is constant, this is a multiplication
5913 or floor division, by a power of two, so we can treat it that
5914 way unless the multiplier or divisor overflows. Signed
5915 left-shift overflow is implementation-defined rather than
5916 undefined in C90, so do not convert signed left shift into
5917 multiplication. */
5918 if (TREE_CODE (op1) == INTEGER_CST
5919 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5920 /* const_binop may not detect overflow correctly,
5921 so check for it explicitly here. */
5922 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5923 && TREE_INT_CST_HIGH (op1) == 0
5924 && 0 != (t1 = fold_convert (ctype,
5925 const_binop (LSHIFT_EXPR,
5926 size_one_node,
5927 op1)))
5928 && !TREE_OVERFLOW (t1))
5929 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5930 ? MULT_EXPR : FLOOR_DIV_EXPR,
5931 ctype,
5932 fold_convert (ctype, op0),
5933 t1),
5934 c, code, wide_type, strict_overflow_p);
5935 break;
5937 case PLUS_EXPR: case MINUS_EXPR:
5938 /* See if we can eliminate the operation on both sides. If we can, we
5939 can return a new PLUS or MINUS. If we can't, the only remaining
5940 cases where we can do anything are if the second operand is a
5941 constant. */
5942 sub_strict_overflow_p = false;
5943 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5944 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5945 if (t1 != 0 && t2 != 0
5946 && (code == MULT_EXPR
5947 /* If not multiplication, we can only do this if both operands
5948 are divisible by c. */
5949 || (multiple_of_p (ctype, op0, c)
5950 && multiple_of_p (ctype, op1, c))))
5952 if (sub_strict_overflow_p)
5953 *strict_overflow_p = true;
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5955 fold_convert (ctype, t2));
5958 /* If this was a subtraction, negate OP1 and set it to be an addition.
5959 This simplifies the logic below. */
5960 if (tcode == MINUS_EXPR)
5962 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5963 /* If OP1 was not easily negatable, the constant may be OP0. */
5964 if (TREE_CODE (op0) == INTEGER_CST)
5966 tree tem = op0;
5967 op0 = op1;
5968 op1 = tem;
5969 tem = t1;
5970 t1 = t2;
5971 t2 = tem;
5975 if (TREE_CODE (op1) != INTEGER_CST)
5976 break;
5978 /* If either OP1 or C are negative, this optimization is not safe for
5979 some of the division and remainder types while for others we need
5980 to change the code. */
5981 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5983 if (code == CEIL_DIV_EXPR)
5984 code = FLOOR_DIV_EXPR;
5985 else if (code == FLOOR_DIV_EXPR)
5986 code = CEIL_DIV_EXPR;
5987 else if (code != MULT_EXPR
5988 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5989 break;
5992 /* If it's a multiply or a division/modulus operation of a multiple
5993 of our constant, do the operation and verify it doesn't overflow. */
5994 if (code == MULT_EXPR
5995 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5997 op1 = const_binop (code, fold_convert (ctype, op1),
5998 fold_convert (ctype, c));
5999 /* We allow the constant to overflow with wrapping semantics. */
6000 if (op1 == 0
6001 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6002 break;
6004 else
6005 break;
6007 /* If we have an unsigned type, we cannot widen the operation since it
6008 will change the result if the original computation overflowed. */
6009 if (TYPE_UNSIGNED (ctype) && ctype != type)
6010 break;
6012 /* If we were able to eliminate our operation from the first side,
6013 apply our operation to the second side and reform the PLUS. */
6014 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6015 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6017 /* The last case is if we are a multiply. In that case, we can
6018 apply the distributive law to commute the multiply and addition
6019 if the multiplication of the constants doesn't overflow
6020 and overflow is defined. With undefined overflow
6021 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6022 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6023 return fold_build2 (tcode, ctype,
6024 fold_build2 (code, ctype,
6025 fold_convert (ctype, op0),
6026 fold_convert (ctype, c)),
6027 op1);
6029 break;
6031 case MULT_EXPR:
6032 /* We have a special case here if we are doing something like
6033 (C * 8) % 4 since we know that's zero. */
6034 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6035 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6036 /* If the multiplication can overflow we cannot optimize this. */
6037 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6038 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6039 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6041 *strict_overflow_p = true;
6042 return omit_one_operand (type, integer_zero_node, op0);
6045 /* ... fall through ... */
6047 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6048 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6049 /* If we can extract our operation from the LHS, do so and return a
6050 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6051 do something only if the second operand is a constant. */
6052 if (same_p
6053 && (t1 = extract_muldiv (op0, c, code, wide_type,
6054 strict_overflow_p)) != 0)
6055 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6056 fold_convert (ctype, op1));
6057 else if (tcode == MULT_EXPR && code == MULT_EXPR
6058 && (t1 = extract_muldiv (op1, c, code, wide_type,
6059 strict_overflow_p)) != 0)
6060 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6061 fold_convert (ctype, t1));
6062 else if (TREE_CODE (op1) != INTEGER_CST)
6063 return 0;
6065 /* If these are the same operation types, we can associate them
6066 assuming no overflow. */
6067 if (tcode == code)
6069 double_int mul;
6070 bool overflow_p;
6071 unsigned prec = TYPE_PRECISION (ctype);
6072 bool uns = TYPE_UNSIGNED (ctype);
6073 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6074 double_int dic = tree_to_double_int (c).ext (prec, uns);
6075 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6076 overflow_p = ((!uns && overflow_p)
6077 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6078 if (!double_int_fits_to_tree_p (ctype, mul)
6079 && ((uns && tcode != MULT_EXPR) || !uns))
6080 overflow_p = 1;
6081 if (!overflow_p)
6082 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6083 double_int_to_tree (ctype, mul));
6086 /* If these operations "cancel" each other, we have the main
6087 optimizations of this pass, which occur when either constant is a
6088 multiple of the other, in which case we replace this with either an
6089 operation or CODE or TCODE.
6091 If we have an unsigned type, we cannot do this since it will change
6092 the result if the original computation overflowed. */
6093 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6094 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6095 || (tcode == MULT_EXPR
6096 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6097 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6098 && code != MULT_EXPR)))
6100 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6102 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6103 *strict_overflow_p = true;
6104 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6105 fold_convert (ctype,
6106 const_binop (TRUNC_DIV_EXPR,
6107 op1, c)));
6109 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6111 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6112 *strict_overflow_p = true;
6113 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6114 fold_convert (ctype,
6115 const_binop (TRUNC_DIV_EXPR,
6116 c, op1)));
6119 break;
6121 default:
6122 break;
6125 return 0;
6128 /* Return a node which has the indicated constant VALUE (either 0 or
6129 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6130 and is of the indicated TYPE. */
6132 tree
6133 constant_boolean_node (bool value, tree type)
6135 if (type == integer_type_node)
6136 return value ? integer_one_node : integer_zero_node;
6137 else if (type == boolean_type_node)
6138 return value ? boolean_true_node : boolean_false_node;
6139 else if (TREE_CODE (type) == VECTOR_TYPE)
6140 return build_vector_from_val (type,
6141 build_int_cst (TREE_TYPE (type),
6142 value ? -1 : 0));
6143 else
6144 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6148 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6149 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6150 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6151 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6152 COND is the first argument to CODE; otherwise (as in the example
6153 given here), it is the second argument. TYPE is the type of the
6154 original expression. Return NULL_TREE if no simplification is
6155 possible. */
6157 static tree
6158 fold_binary_op_with_conditional_arg (location_t loc,
6159 enum tree_code code,
6160 tree type, tree op0, tree op1,
6161 tree cond, tree arg, int cond_first_p)
6163 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6164 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6165 tree test, true_value, false_value;
6166 tree lhs = NULL_TREE;
6167 tree rhs = NULL_TREE;
6168 enum tree_code cond_code = COND_EXPR;
6170 if (TREE_CODE (cond) == COND_EXPR
6171 || TREE_CODE (cond) == VEC_COND_EXPR)
6173 test = TREE_OPERAND (cond, 0);
6174 true_value = TREE_OPERAND (cond, 1);
6175 false_value = TREE_OPERAND (cond, 2);
6176 /* If this operand throws an expression, then it does not make
6177 sense to try to perform a logical or arithmetic operation
6178 involving it. */
6179 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6180 lhs = true_value;
6181 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6182 rhs = false_value;
6184 else
6186 tree testtype = TREE_TYPE (cond);
6187 test = cond;
6188 true_value = constant_boolean_node (true, testtype);
6189 false_value = constant_boolean_node (false, testtype);
6192 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6193 cond_code = VEC_COND_EXPR;
6195 /* This transformation is only worthwhile if we don't have to wrap ARG
6196 in a SAVE_EXPR and the operation can be simplified without recursing
6197 on at least one of the branches once its pushed inside the COND_EXPR. */
6198 if (!TREE_CONSTANT (arg)
6199 && (TREE_SIDE_EFFECTS (arg)
6200 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6201 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6202 return NULL_TREE;
6204 arg = fold_convert_loc (loc, arg_type, arg);
6205 if (lhs == 0)
6207 true_value = fold_convert_loc (loc, cond_type, true_value);
6208 if (cond_first_p)
6209 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6210 else
6211 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6213 if (rhs == 0)
6215 false_value = fold_convert_loc (loc, cond_type, false_value);
6216 if (cond_first_p)
6217 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6218 else
6219 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6222 /* Check that we have simplified at least one of the branches. */
6223 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6224 return NULL_TREE;
6226 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6230 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6232 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6233 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6234 ADDEND is the same as X.
6236 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6237 and finite. The problematic cases are when X is zero, and its mode
6238 has signed zeros. In the case of rounding towards -infinity,
6239 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6240 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6242 bool
6243 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6245 if (!real_zerop (addend))
6246 return false;
6248 /* Don't allow the fold with -fsignaling-nans. */
6249 if (HONOR_SNANS (TYPE_MODE (type)))
6250 return false;
6252 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6253 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6254 return true;
6256 /* In a vector or complex, we would need to check the sign of all zeros. */
6257 if (TREE_CODE (addend) != REAL_CST)
6258 return false;
6260 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6261 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6262 negate = !negate;
6264 /* The mode has signed zeros, and we have to honor their sign.
6265 In this situation, there is only one case we can return true for.
6266 X - 0 is the same as X unless rounding towards -infinity is
6267 supported. */
6268 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6271 /* Subroutine of fold() that checks comparisons of built-in math
6272 functions against real constants.
6274 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6275 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6276 is the type of the result and ARG0 and ARG1 are the operands of the
6277 comparison. ARG1 must be a TREE_REAL_CST.
6279 The function returns the constant folded tree if a simplification
6280 can be made, and NULL_TREE otherwise. */
6282 static tree
6283 fold_mathfn_compare (location_t loc,
6284 enum built_in_function fcode, enum tree_code code,
6285 tree type, tree arg0, tree arg1)
6287 REAL_VALUE_TYPE c;
6289 if (BUILTIN_SQRT_P (fcode))
6291 tree arg = CALL_EXPR_ARG (arg0, 0);
6292 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6294 c = TREE_REAL_CST (arg1);
6295 if (REAL_VALUE_NEGATIVE (c))
6297 /* sqrt(x) < y is always false, if y is negative. */
6298 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6299 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6301 /* sqrt(x) > y is always true, if y is negative and we
6302 don't care about NaNs, i.e. negative values of x. */
6303 if (code == NE_EXPR || !HONOR_NANS (mode))
6304 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6306 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6307 return fold_build2_loc (loc, GE_EXPR, type, arg,
6308 build_real (TREE_TYPE (arg), dconst0));
6310 else if (code == GT_EXPR || code == GE_EXPR)
6312 REAL_VALUE_TYPE c2;
6314 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6315 real_convert (&c2, mode, &c2);
6317 if (REAL_VALUE_ISINF (c2))
6319 /* sqrt(x) > y is x == +Inf, when y is very large. */
6320 if (HONOR_INFINITIES (mode))
6321 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6322 build_real (TREE_TYPE (arg), c2));
6324 /* sqrt(x) > y is always false, when y is very large
6325 and we don't care about infinities. */
6326 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6329 /* sqrt(x) > c is the same as x > c*c. */
6330 return fold_build2_loc (loc, code, type, arg,
6331 build_real (TREE_TYPE (arg), c2));
6333 else if (code == LT_EXPR || code == LE_EXPR)
6335 REAL_VALUE_TYPE c2;
6337 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6338 real_convert (&c2, mode, &c2);
6340 if (REAL_VALUE_ISINF (c2))
6342 /* sqrt(x) < y is always true, when y is a very large
6343 value and we don't care about NaNs or Infinities. */
6344 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6345 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6347 /* sqrt(x) < y is x != +Inf when y is very large and we
6348 don't care about NaNs. */
6349 if (! HONOR_NANS (mode))
6350 return fold_build2_loc (loc, NE_EXPR, type, arg,
6351 build_real (TREE_TYPE (arg), c2));
6353 /* sqrt(x) < y is x >= 0 when y is very large and we
6354 don't care about Infinities. */
6355 if (! HONOR_INFINITIES (mode))
6356 return fold_build2_loc (loc, GE_EXPR, type, arg,
6357 build_real (TREE_TYPE (arg), dconst0));
6359 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6360 arg = save_expr (arg);
6361 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6362 fold_build2_loc (loc, GE_EXPR, type, arg,
6363 build_real (TREE_TYPE (arg),
6364 dconst0)),
6365 fold_build2_loc (loc, NE_EXPR, type, arg,
6366 build_real (TREE_TYPE (arg),
6367 c2)));
6370 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6371 if (! HONOR_NANS (mode))
6372 return fold_build2_loc (loc, code, type, arg,
6373 build_real (TREE_TYPE (arg), c2));
6375 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6376 arg = save_expr (arg);
6377 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6378 fold_build2_loc (loc, GE_EXPR, type, arg,
6379 build_real (TREE_TYPE (arg),
6380 dconst0)),
6381 fold_build2_loc (loc, code, type, arg,
6382 build_real (TREE_TYPE (arg),
6383 c2)));
6387 return NULL_TREE;
6390 /* Subroutine of fold() that optimizes comparisons against Infinities,
6391 either +Inf or -Inf.
6393 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6394 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6395 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6397 The function returns the constant folded tree if a simplification
6398 can be made, and NULL_TREE otherwise. */
6400 static tree
6401 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6402 tree arg0, tree arg1)
6404 enum machine_mode mode;
6405 REAL_VALUE_TYPE max;
6406 tree temp;
6407 bool neg;
6409 mode = TYPE_MODE (TREE_TYPE (arg0));
6411 /* For negative infinity swap the sense of the comparison. */
6412 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6413 if (neg)
6414 code = swap_tree_comparison (code);
6416 switch (code)
6418 case GT_EXPR:
6419 /* x > +Inf is always false, if with ignore sNANs. */
6420 if (HONOR_SNANS (mode))
6421 return NULL_TREE;
6422 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6424 case LE_EXPR:
6425 /* x <= +Inf is always true, if we don't case about NaNs. */
6426 if (! HONOR_NANS (mode))
6427 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6429 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6430 arg0 = save_expr (arg0);
6431 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6433 case EQ_EXPR:
6434 case GE_EXPR:
6435 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6436 real_maxval (&max, neg, mode);
6437 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6438 arg0, build_real (TREE_TYPE (arg0), max));
6440 case LT_EXPR:
6441 /* x < +Inf is always equal to x <= DBL_MAX. */
6442 real_maxval (&max, neg, mode);
6443 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6444 arg0, build_real (TREE_TYPE (arg0), max));
6446 case NE_EXPR:
6447 /* x != +Inf is always equal to !(x > DBL_MAX). */
6448 real_maxval (&max, neg, mode);
6449 if (! HONOR_NANS (mode))
6450 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6451 arg0, build_real (TREE_TYPE (arg0), max));
6453 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6454 arg0, build_real (TREE_TYPE (arg0), max));
6455 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6457 default:
6458 break;
6461 return NULL_TREE;
6464 /* Subroutine of fold() that optimizes comparisons of a division by
6465 a nonzero integer constant against an integer constant, i.e.
6466 X/C1 op C2.
6468 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6469 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6470 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6472 The function returns the constant folded tree if a simplification
6473 can be made, and NULL_TREE otherwise. */
6475 static tree
6476 fold_div_compare (location_t loc,
6477 enum tree_code code, tree type, tree arg0, tree arg1)
6479 tree prod, tmp, hi, lo;
6480 tree arg00 = TREE_OPERAND (arg0, 0);
6481 tree arg01 = TREE_OPERAND (arg0, 1);
6482 double_int val;
6483 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6484 bool neg_overflow;
6485 bool overflow;
6487 /* We have to do this the hard way to detect unsigned overflow.
6488 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6489 val = TREE_INT_CST (arg01)
6490 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6491 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6492 neg_overflow = false;
6494 if (unsigned_p)
6496 tmp = int_const_binop (MINUS_EXPR, arg01,
6497 build_int_cst (TREE_TYPE (arg01), 1));
6498 lo = prod;
6500 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6501 val = TREE_INT_CST (prod)
6502 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6503 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6504 -1, overflow | TREE_OVERFLOW (prod));
6506 else if (tree_int_cst_sgn (arg01) >= 0)
6508 tmp = int_const_binop (MINUS_EXPR, arg01,
6509 build_int_cst (TREE_TYPE (arg01), 1));
6510 switch (tree_int_cst_sgn (arg1))
6512 case -1:
6513 neg_overflow = true;
6514 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6515 hi = prod;
6516 break;
6518 case 0:
6519 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6520 hi = tmp;
6521 break;
6523 case 1:
6524 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6525 lo = prod;
6526 break;
6528 default:
6529 gcc_unreachable ();
6532 else
6534 /* A negative divisor reverses the relational operators. */
6535 code = swap_tree_comparison (code);
6537 tmp = int_const_binop (PLUS_EXPR, arg01,
6538 build_int_cst (TREE_TYPE (arg01), 1));
6539 switch (tree_int_cst_sgn (arg1))
6541 case -1:
6542 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6543 lo = prod;
6544 break;
6546 case 0:
6547 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6548 lo = tmp;
6549 break;
6551 case 1:
6552 neg_overflow = true;
6553 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6554 hi = prod;
6555 break;
6557 default:
6558 gcc_unreachable ();
6562 switch (code)
6564 case EQ_EXPR:
6565 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6566 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6567 if (TREE_OVERFLOW (hi))
6568 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6569 if (TREE_OVERFLOW (lo))
6570 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6571 return build_range_check (loc, type, arg00, 1, lo, hi);
6573 case NE_EXPR:
6574 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6575 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6576 if (TREE_OVERFLOW (hi))
6577 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6578 if (TREE_OVERFLOW (lo))
6579 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6580 return build_range_check (loc, type, arg00, 0, lo, hi);
6582 case LT_EXPR:
6583 if (TREE_OVERFLOW (lo))
6585 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6586 return omit_one_operand_loc (loc, type, tmp, arg00);
6588 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6590 case LE_EXPR:
6591 if (TREE_OVERFLOW (hi))
6593 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6594 return omit_one_operand_loc (loc, type, tmp, arg00);
6596 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6598 case GT_EXPR:
6599 if (TREE_OVERFLOW (hi))
6601 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6602 return omit_one_operand_loc (loc, type, tmp, arg00);
6604 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6606 case GE_EXPR:
6607 if (TREE_OVERFLOW (lo))
6609 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6610 return omit_one_operand_loc (loc, type, tmp, arg00);
6612 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6614 default:
6615 break;
6618 return NULL_TREE;
6622 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6623 equality/inequality test, then return a simplified form of the test
6624 using a sign testing. Otherwise return NULL. TYPE is the desired
6625 result type. */
6627 static tree
6628 fold_single_bit_test_into_sign_test (location_t loc,
6629 enum tree_code code, tree arg0, tree arg1,
6630 tree result_type)
6632 /* If this is testing a single bit, we can optimize the test. */
6633 if ((code == NE_EXPR || code == EQ_EXPR)
6634 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6635 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6637 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6638 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6639 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6641 if (arg00 != NULL_TREE
6642 /* This is only a win if casting to a signed type is cheap,
6643 i.e. when arg00's type is not a partial mode. */
6644 && TYPE_PRECISION (TREE_TYPE (arg00))
6645 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6647 tree stype = signed_type_for (TREE_TYPE (arg00));
6648 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6649 result_type,
6650 fold_convert_loc (loc, stype, arg00),
6651 build_int_cst (stype, 0));
6655 return NULL_TREE;
6658 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6659 equality/inequality test, then return a simplified form of
6660 the test using shifts and logical operations. Otherwise return
6661 NULL. TYPE is the desired result type. */
6663 tree
6664 fold_single_bit_test (location_t loc, enum tree_code code,
6665 tree arg0, tree arg1, tree result_type)
6667 /* If this is testing a single bit, we can optimize the test. */
6668 if ((code == NE_EXPR || code == EQ_EXPR)
6669 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6670 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6672 tree inner = TREE_OPERAND (arg0, 0);
6673 tree type = TREE_TYPE (arg0);
6674 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6675 enum machine_mode operand_mode = TYPE_MODE (type);
6676 int ops_unsigned;
6677 tree signed_type, unsigned_type, intermediate_type;
6678 tree tem, one;
6680 /* First, see if we can fold the single bit test into a sign-bit
6681 test. */
6682 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6683 result_type);
6684 if (tem)
6685 return tem;
6687 /* Otherwise we have (A & C) != 0 where C is a single bit,
6688 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6689 Similarly for (A & C) == 0. */
6691 /* If INNER is a right shift of a constant and it plus BITNUM does
6692 not overflow, adjust BITNUM and INNER. */
6693 if (TREE_CODE (inner) == RSHIFT_EXPR
6694 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6695 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6696 && bitnum < TYPE_PRECISION (type)
6697 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6698 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6700 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6701 inner = TREE_OPERAND (inner, 0);
6704 /* If we are going to be able to omit the AND below, we must do our
6705 operations as unsigned. If we must use the AND, we have a choice.
6706 Normally unsigned is faster, but for some machines signed is. */
6707 #ifdef LOAD_EXTEND_OP
6708 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6709 && !flag_syntax_only) ? 0 : 1;
6710 #else
6711 ops_unsigned = 1;
6712 #endif
6714 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6715 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6716 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6717 inner = fold_convert_loc (loc, intermediate_type, inner);
6719 if (bitnum != 0)
6720 inner = build2 (RSHIFT_EXPR, intermediate_type,
6721 inner, size_int (bitnum));
6723 one = build_int_cst (intermediate_type, 1);
6725 if (code == EQ_EXPR)
6726 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6728 /* Put the AND last so it can combine with more things. */
6729 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6731 /* Make sure to return the proper type. */
6732 inner = fold_convert_loc (loc, result_type, inner);
6734 return inner;
6736 return NULL_TREE;
6739 /* Check whether we are allowed to reorder operands arg0 and arg1,
6740 such that the evaluation of arg1 occurs before arg0. */
6742 static bool
6743 reorder_operands_p (const_tree arg0, const_tree arg1)
6745 if (! flag_evaluation_order)
6746 return true;
6747 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6748 return true;
6749 return ! TREE_SIDE_EFFECTS (arg0)
6750 && ! TREE_SIDE_EFFECTS (arg1);
6753 /* Test whether it is preferable two swap two operands, ARG0 and
6754 ARG1, for example because ARG0 is an integer constant and ARG1
6755 isn't. If REORDER is true, only recommend swapping if we can
6756 evaluate the operands in reverse order. */
6758 bool
6759 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6761 STRIP_SIGN_NOPS (arg0);
6762 STRIP_SIGN_NOPS (arg1);
6764 if (TREE_CODE (arg1) == INTEGER_CST)
6765 return 0;
6766 if (TREE_CODE (arg0) == INTEGER_CST)
6767 return 1;
6769 if (TREE_CODE (arg1) == REAL_CST)
6770 return 0;
6771 if (TREE_CODE (arg0) == REAL_CST)
6772 return 1;
6774 if (TREE_CODE (arg1) == FIXED_CST)
6775 return 0;
6776 if (TREE_CODE (arg0) == FIXED_CST)
6777 return 1;
6779 if (TREE_CODE (arg1) == COMPLEX_CST)
6780 return 0;
6781 if (TREE_CODE (arg0) == COMPLEX_CST)
6782 return 1;
6784 if (TREE_CONSTANT (arg1))
6785 return 0;
6786 if (TREE_CONSTANT (arg0))
6787 return 1;
6789 if (optimize_function_for_size_p (cfun))
6790 return 0;
6792 if (reorder && flag_evaluation_order
6793 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6794 return 0;
6796 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6797 for commutative and comparison operators. Ensuring a canonical
6798 form allows the optimizers to find additional redundancies without
6799 having to explicitly check for both orderings. */
6800 if (TREE_CODE (arg0) == SSA_NAME
6801 && TREE_CODE (arg1) == SSA_NAME
6802 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6803 return 1;
6805 /* Put SSA_NAMEs last. */
6806 if (TREE_CODE (arg1) == SSA_NAME)
6807 return 0;
6808 if (TREE_CODE (arg0) == SSA_NAME)
6809 return 1;
6811 /* Put variables last. */
6812 if (DECL_P (arg1))
6813 return 0;
6814 if (DECL_P (arg0))
6815 return 1;
6817 return 0;
6820 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6821 ARG0 is extended to a wider type. */
6823 static tree
6824 fold_widened_comparison (location_t loc, enum tree_code code,
6825 tree type, tree arg0, tree arg1)
6827 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6828 tree arg1_unw;
6829 tree shorter_type, outer_type;
6830 tree min, max;
6831 bool above, below;
6833 if (arg0_unw == arg0)
6834 return NULL_TREE;
6835 shorter_type = TREE_TYPE (arg0_unw);
6837 #ifdef HAVE_canonicalize_funcptr_for_compare
6838 /* Disable this optimization if we're casting a function pointer
6839 type on targets that require function pointer canonicalization. */
6840 if (HAVE_canonicalize_funcptr_for_compare
6841 && TREE_CODE (shorter_type) == POINTER_TYPE
6842 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6843 return NULL_TREE;
6844 #endif
6846 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6847 return NULL_TREE;
6849 arg1_unw = get_unwidened (arg1, NULL_TREE);
6851 /* If possible, express the comparison in the shorter mode. */
6852 if ((code == EQ_EXPR || code == NE_EXPR
6853 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6854 && (TREE_TYPE (arg1_unw) == shorter_type
6855 || ((TYPE_PRECISION (shorter_type)
6856 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6857 && (TYPE_UNSIGNED (shorter_type)
6858 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6859 || (TREE_CODE (arg1_unw) == INTEGER_CST
6860 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6861 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6862 && int_fits_type_p (arg1_unw, shorter_type))))
6863 return fold_build2_loc (loc, code, type, arg0_unw,
6864 fold_convert_loc (loc, shorter_type, arg1_unw));
6866 if (TREE_CODE (arg1_unw) != INTEGER_CST
6867 || TREE_CODE (shorter_type) != INTEGER_TYPE
6868 || !int_fits_type_p (arg1_unw, shorter_type))
6869 return NULL_TREE;
6871 /* If we are comparing with the integer that does not fit into the range
6872 of the shorter type, the result is known. */
6873 outer_type = TREE_TYPE (arg1_unw);
6874 min = lower_bound_in_type (outer_type, shorter_type);
6875 max = upper_bound_in_type (outer_type, shorter_type);
6877 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6878 max, arg1_unw));
6879 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6880 arg1_unw, min));
6882 switch (code)
6884 case EQ_EXPR:
6885 if (above || below)
6886 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6887 break;
6889 case NE_EXPR:
6890 if (above || below)
6891 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6892 break;
6894 case LT_EXPR:
6895 case LE_EXPR:
6896 if (above)
6897 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6898 else if (below)
6899 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6901 case GT_EXPR:
6902 case GE_EXPR:
6903 if (above)
6904 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6905 else if (below)
6906 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6908 default:
6909 break;
6912 return NULL_TREE;
6915 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6916 ARG0 just the signedness is changed. */
6918 static tree
6919 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6920 tree arg0, tree arg1)
6922 tree arg0_inner;
6923 tree inner_type, outer_type;
6925 if (!CONVERT_EXPR_P (arg0))
6926 return NULL_TREE;
6928 outer_type = TREE_TYPE (arg0);
6929 arg0_inner = TREE_OPERAND (arg0, 0);
6930 inner_type = TREE_TYPE (arg0_inner);
6932 #ifdef HAVE_canonicalize_funcptr_for_compare
6933 /* Disable this optimization if we're casting a function pointer
6934 type on targets that require function pointer canonicalization. */
6935 if (HAVE_canonicalize_funcptr_for_compare
6936 && TREE_CODE (inner_type) == POINTER_TYPE
6937 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6938 return NULL_TREE;
6939 #endif
6941 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6942 return NULL_TREE;
6944 if (TREE_CODE (arg1) != INTEGER_CST
6945 && !(CONVERT_EXPR_P (arg1)
6946 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6947 return NULL_TREE;
6949 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6950 && code != NE_EXPR
6951 && code != EQ_EXPR)
6952 return NULL_TREE;
6954 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6955 return NULL_TREE;
6957 if (TREE_CODE (arg1) == INTEGER_CST)
6958 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6959 0, TREE_OVERFLOW (arg1));
6960 else
6961 arg1 = fold_convert_loc (loc, inner_type, arg1);
6963 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6966 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6967 step of the array. Reconstructs s and delta in the case of s *
6968 delta being an integer constant (and thus already folded). ADDR is
6969 the address. MULT is the multiplicative expression. If the
6970 function succeeds, the new address expression is returned.
6971 Otherwise NULL_TREE is returned. LOC is the location of the
6972 resulting expression. */
6974 static tree
6975 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6977 tree s, delta, step;
6978 tree ref = TREE_OPERAND (addr, 0), pref;
6979 tree ret, pos;
6980 tree itype;
6981 bool mdim = false;
6983 /* Strip the nops that might be added when converting op1 to sizetype. */
6984 STRIP_NOPS (op1);
6986 /* Canonicalize op1 into a possibly non-constant delta
6987 and an INTEGER_CST s. */
6988 if (TREE_CODE (op1) == MULT_EXPR)
6990 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6992 STRIP_NOPS (arg0);
6993 STRIP_NOPS (arg1);
6995 if (TREE_CODE (arg0) == INTEGER_CST)
6997 s = arg0;
6998 delta = arg1;
7000 else if (TREE_CODE (arg1) == INTEGER_CST)
7002 s = arg1;
7003 delta = arg0;
7005 else
7006 return NULL_TREE;
7008 else if (TREE_CODE (op1) == INTEGER_CST)
7010 delta = op1;
7011 s = NULL_TREE;
7013 else
7015 /* Simulate we are delta * 1. */
7016 delta = op1;
7017 s = integer_one_node;
7020 /* Handle &x.array the same as we would handle &x.array[0]. */
7021 if (TREE_CODE (ref) == COMPONENT_REF
7022 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
7024 tree domain;
7026 /* Remember if this was a multi-dimensional array. */
7027 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7028 mdim = true;
7030 domain = TYPE_DOMAIN (TREE_TYPE (ref));
7031 if (! domain)
7032 goto cont;
7033 itype = TREE_TYPE (domain);
7035 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7036 if (TREE_CODE (step) != INTEGER_CST)
7037 goto cont;
7039 if (s)
7041 if (! tree_int_cst_equal (step, s))
7042 goto cont;
7044 else
7046 /* Try if delta is a multiple of step. */
7047 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7048 if (! tmp)
7049 goto cont;
7050 delta = tmp;
7053 /* Only fold here if we can verify we do not overflow one
7054 dimension of a multi-dimensional array. */
7055 if (mdim)
7057 tree tmp;
7059 if (!TYPE_MIN_VALUE (domain)
7060 || !TYPE_MAX_VALUE (domain)
7061 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7062 goto cont;
7064 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7065 fold_convert_loc (loc, itype,
7066 TYPE_MIN_VALUE (domain)),
7067 fold_convert_loc (loc, itype, delta));
7068 if (TREE_CODE (tmp) != INTEGER_CST
7069 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7070 goto cont;
7073 /* We found a suitable component reference. */
7075 pref = TREE_OPERAND (addr, 0);
7076 ret = copy_node (pref);
7077 SET_EXPR_LOCATION (ret, loc);
7079 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7080 fold_build2_loc
7081 (loc, PLUS_EXPR, itype,
7082 fold_convert_loc (loc, itype,
7083 TYPE_MIN_VALUE
7084 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7085 fold_convert_loc (loc, itype, delta)),
7086 NULL_TREE, NULL_TREE);
7087 return build_fold_addr_expr_loc (loc, ret);
7090 cont:
7092 for (;; ref = TREE_OPERAND (ref, 0))
7094 if (TREE_CODE (ref) == ARRAY_REF)
7096 tree domain;
7098 /* Remember if this was a multi-dimensional array. */
7099 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7100 mdim = true;
7102 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7103 if (! domain)
7104 continue;
7105 itype = TREE_TYPE (domain);
7107 step = array_ref_element_size (ref);
7108 if (TREE_CODE (step) != INTEGER_CST)
7109 continue;
7111 if (s)
7113 if (! tree_int_cst_equal (step, s))
7114 continue;
7116 else
7118 /* Try if delta is a multiple of step. */
7119 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7120 if (! tmp)
7121 continue;
7122 delta = tmp;
7125 /* Only fold here if we can verify we do not overflow one
7126 dimension of a multi-dimensional array. */
7127 if (mdim)
7129 tree tmp;
7131 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7132 || !TYPE_MAX_VALUE (domain)
7133 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7134 continue;
7136 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7137 fold_convert_loc (loc, itype,
7138 TREE_OPERAND (ref, 1)),
7139 fold_convert_loc (loc, itype, delta));
7140 if (!tmp
7141 || TREE_CODE (tmp) != INTEGER_CST
7142 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7143 continue;
7146 break;
7148 else
7149 mdim = false;
7151 if (!handled_component_p (ref))
7152 return NULL_TREE;
7155 /* We found the suitable array reference. So copy everything up to it,
7156 and replace the index. */
7158 pref = TREE_OPERAND (addr, 0);
7159 ret = copy_node (pref);
7160 SET_EXPR_LOCATION (ret, loc);
7161 pos = ret;
7163 while (pref != ref)
7165 pref = TREE_OPERAND (pref, 0);
7166 TREE_OPERAND (pos, 0) = copy_node (pref);
7167 pos = TREE_OPERAND (pos, 0);
7170 TREE_OPERAND (pos, 1)
7171 = fold_build2_loc (loc, PLUS_EXPR, itype,
7172 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7173 fold_convert_loc (loc, itype, delta));
7174 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7178 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7179 means A >= Y && A != MAX, but in this case we know that
7180 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7182 static tree
7183 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7185 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7187 if (TREE_CODE (bound) == LT_EXPR)
7188 a = TREE_OPERAND (bound, 0);
7189 else if (TREE_CODE (bound) == GT_EXPR)
7190 a = TREE_OPERAND (bound, 1);
7191 else
7192 return NULL_TREE;
7194 typea = TREE_TYPE (a);
7195 if (!INTEGRAL_TYPE_P (typea)
7196 && !POINTER_TYPE_P (typea))
7197 return NULL_TREE;
7199 if (TREE_CODE (ineq) == LT_EXPR)
7201 a1 = TREE_OPERAND (ineq, 1);
7202 y = TREE_OPERAND (ineq, 0);
7204 else if (TREE_CODE (ineq) == GT_EXPR)
7206 a1 = TREE_OPERAND (ineq, 0);
7207 y = TREE_OPERAND (ineq, 1);
7209 else
7210 return NULL_TREE;
7212 if (TREE_TYPE (a1) != typea)
7213 return NULL_TREE;
7215 if (POINTER_TYPE_P (typea))
7217 /* Convert the pointer types into integer before taking the difference. */
7218 tree ta = fold_convert_loc (loc, ssizetype, a);
7219 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7220 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7222 else
7223 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7225 if (!diff || !integer_onep (diff))
7226 return NULL_TREE;
7228 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7231 /* Fold a sum or difference of at least one multiplication.
7232 Returns the folded tree or NULL if no simplification could be made. */
7234 static tree
7235 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7236 tree arg0, tree arg1)
7238 tree arg00, arg01, arg10, arg11;
7239 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7241 /* (A * C) +- (B * C) -> (A+-B) * C.
7242 (A * C) +- A -> A * (C+-1).
7243 We are most concerned about the case where C is a constant,
7244 but other combinations show up during loop reduction. Since
7245 it is not difficult, try all four possibilities. */
7247 if (TREE_CODE (arg0) == MULT_EXPR)
7249 arg00 = TREE_OPERAND (arg0, 0);
7250 arg01 = TREE_OPERAND (arg0, 1);
7252 else if (TREE_CODE (arg0) == INTEGER_CST)
7254 arg00 = build_one_cst (type);
7255 arg01 = arg0;
7257 else
7259 /* We cannot generate constant 1 for fract. */
7260 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7261 return NULL_TREE;
7262 arg00 = arg0;
7263 arg01 = build_one_cst (type);
7265 if (TREE_CODE (arg1) == MULT_EXPR)
7267 arg10 = TREE_OPERAND (arg1, 0);
7268 arg11 = TREE_OPERAND (arg1, 1);
7270 else if (TREE_CODE (arg1) == INTEGER_CST)
7272 arg10 = build_one_cst (type);
7273 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7274 the purpose of this canonicalization. */
7275 if (TREE_INT_CST_HIGH (arg1) == -1
7276 && negate_expr_p (arg1)
7277 && code == PLUS_EXPR)
7279 arg11 = negate_expr (arg1);
7280 code = MINUS_EXPR;
7282 else
7283 arg11 = arg1;
7285 else
7287 /* We cannot generate constant 1 for fract. */
7288 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7289 return NULL_TREE;
7290 arg10 = arg1;
7291 arg11 = build_one_cst (type);
7293 same = NULL_TREE;
7295 if (operand_equal_p (arg01, arg11, 0))
7296 same = arg01, alt0 = arg00, alt1 = arg10;
7297 else if (operand_equal_p (arg00, arg10, 0))
7298 same = arg00, alt0 = arg01, alt1 = arg11;
7299 else if (operand_equal_p (arg00, arg11, 0))
7300 same = arg00, alt0 = arg01, alt1 = arg10;
7301 else if (operand_equal_p (arg01, arg10, 0))
7302 same = arg01, alt0 = arg00, alt1 = arg11;
7304 /* No identical multiplicands; see if we can find a common
7305 power-of-two factor in non-power-of-two multiplies. This
7306 can help in multi-dimensional array access. */
7307 else if (tree_fits_shwi_p (arg01)
7308 && tree_fits_shwi_p (arg11))
7310 HOST_WIDE_INT int01, int11, tmp;
7311 bool swap = false;
7312 tree maybe_same;
7313 int01 = tree_to_shwi (arg01);
7314 int11 = tree_to_shwi (arg11);
7316 /* Move min of absolute values to int11. */
7317 if (absu_hwi (int01) < absu_hwi (int11))
7319 tmp = int01, int01 = int11, int11 = tmp;
7320 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7321 maybe_same = arg01;
7322 swap = true;
7324 else
7325 maybe_same = arg11;
7327 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7328 /* The remainder should not be a constant, otherwise we
7329 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7330 increased the number of multiplications necessary. */
7331 && TREE_CODE (arg10) != INTEGER_CST)
7333 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7334 build_int_cst (TREE_TYPE (arg00),
7335 int01 / int11));
7336 alt1 = arg10;
7337 same = maybe_same;
7338 if (swap)
7339 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7343 if (same)
7344 return fold_build2_loc (loc, MULT_EXPR, type,
7345 fold_build2_loc (loc, code, type,
7346 fold_convert_loc (loc, type, alt0),
7347 fold_convert_loc (loc, type, alt1)),
7348 fold_convert_loc (loc, type, same));
7350 return NULL_TREE;
7353 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7354 specified by EXPR into the buffer PTR of length LEN bytes.
7355 Return the number of bytes placed in the buffer, or zero
7356 upon failure. */
7358 static int
7359 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7361 tree type = TREE_TYPE (expr);
7362 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7363 int byte, offset, word, words;
7364 unsigned char value;
7366 if (total_bytes > len)
7367 return 0;
7368 words = total_bytes / UNITS_PER_WORD;
7370 for (byte = 0; byte < total_bytes; byte++)
7372 int bitpos = byte * BITS_PER_UNIT;
7373 if (bitpos < HOST_BITS_PER_WIDE_INT)
7374 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7375 else
7376 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7377 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7379 if (total_bytes > UNITS_PER_WORD)
7381 word = byte / UNITS_PER_WORD;
7382 if (WORDS_BIG_ENDIAN)
7383 word = (words - 1) - word;
7384 offset = word * UNITS_PER_WORD;
7385 if (BYTES_BIG_ENDIAN)
7386 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7387 else
7388 offset += byte % UNITS_PER_WORD;
7390 else
7391 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7392 ptr[offset] = value;
7394 return total_bytes;
7398 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7399 specified by EXPR into the buffer PTR of length LEN bytes.
7400 Return the number of bytes placed in the buffer, or zero
7401 upon failure. */
7403 static int
7404 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7406 tree type = TREE_TYPE (expr);
7407 enum machine_mode mode = TYPE_MODE (type);
7408 int total_bytes = GET_MODE_SIZE (mode);
7409 FIXED_VALUE_TYPE value;
7410 tree i_value, i_type;
7412 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7413 return 0;
7415 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7417 if (NULL_TREE == i_type
7418 || TYPE_PRECISION (i_type) != total_bytes)
7419 return 0;
7421 value = TREE_FIXED_CST (expr);
7422 i_value = double_int_to_tree (i_type, value.data);
7424 return native_encode_int (i_value, ptr, len);
7428 /* Subroutine of native_encode_expr. Encode the REAL_CST
7429 specified by EXPR into the buffer PTR of length LEN bytes.
7430 Return the number of bytes placed in the buffer, or zero
7431 upon failure. */
7433 static int
7434 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7436 tree type = TREE_TYPE (expr);
7437 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7438 int byte, offset, word, words, bitpos;
7439 unsigned char value;
7441 /* There are always 32 bits in each long, no matter the size of
7442 the hosts long. We handle floating point representations with
7443 up to 192 bits. */
7444 long tmp[6];
7446 if (total_bytes > len)
7447 return 0;
7448 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7450 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7452 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7453 bitpos += BITS_PER_UNIT)
7455 byte = (bitpos / BITS_PER_UNIT) & 3;
7456 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7458 if (UNITS_PER_WORD < 4)
7460 word = byte / UNITS_PER_WORD;
7461 if (WORDS_BIG_ENDIAN)
7462 word = (words - 1) - word;
7463 offset = word * UNITS_PER_WORD;
7464 if (BYTES_BIG_ENDIAN)
7465 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7466 else
7467 offset += byte % UNITS_PER_WORD;
7469 else
7470 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7471 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7473 return total_bytes;
7476 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7477 specified by EXPR into the buffer PTR of length LEN bytes.
7478 Return the number of bytes placed in the buffer, or zero
7479 upon failure. */
7481 static int
7482 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7484 int rsize, isize;
7485 tree part;
7487 part = TREE_REALPART (expr);
7488 rsize = native_encode_expr (part, ptr, len);
7489 if (rsize == 0)
7490 return 0;
7491 part = TREE_IMAGPART (expr);
7492 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7493 if (isize != rsize)
7494 return 0;
7495 return rsize + isize;
7499 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7500 specified by EXPR into the buffer PTR of length LEN bytes.
7501 Return the number of bytes placed in the buffer, or zero
7502 upon failure. */
7504 static int
7505 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7507 unsigned i, count;
7508 int size, offset;
7509 tree itype, elem;
7511 offset = 0;
7512 count = VECTOR_CST_NELTS (expr);
7513 itype = TREE_TYPE (TREE_TYPE (expr));
7514 size = GET_MODE_SIZE (TYPE_MODE (itype));
7515 for (i = 0; i < count; i++)
7517 elem = VECTOR_CST_ELT (expr, i);
7518 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7519 return 0;
7520 offset += size;
7522 return offset;
7526 /* Subroutine of native_encode_expr. Encode the STRING_CST
7527 specified by EXPR into the buffer PTR of length LEN bytes.
7528 Return the number of bytes placed in the buffer, or zero
7529 upon failure. */
7531 static int
7532 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7534 tree type = TREE_TYPE (expr);
7535 HOST_WIDE_INT total_bytes;
7537 if (TREE_CODE (type) != ARRAY_TYPE
7538 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7539 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7540 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7541 return 0;
7542 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7543 if (total_bytes > len)
7544 return 0;
7545 if (TREE_STRING_LENGTH (expr) < total_bytes)
7547 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7548 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7549 total_bytes - TREE_STRING_LENGTH (expr));
7551 else
7552 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7553 return total_bytes;
7557 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7558 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7559 buffer PTR of length LEN bytes. Return the number of bytes
7560 placed in the buffer, or zero upon failure. */
7563 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7565 switch (TREE_CODE (expr))
7567 case INTEGER_CST:
7568 return native_encode_int (expr, ptr, len);
7570 case REAL_CST:
7571 return native_encode_real (expr, ptr, len);
7573 case FIXED_CST:
7574 return native_encode_fixed (expr, ptr, len);
7576 case COMPLEX_CST:
7577 return native_encode_complex (expr, ptr, len);
7579 case VECTOR_CST:
7580 return native_encode_vector (expr, ptr, len);
7582 case STRING_CST:
7583 return native_encode_string (expr, ptr, len);
7585 default:
7586 return 0;
7591 /* Subroutine of native_interpret_expr. Interpret the contents of
7592 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7593 If the buffer cannot be interpreted, return NULL_TREE. */
7595 static tree
7596 native_interpret_int (tree type, const unsigned char *ptr, int len)
7598 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7599 double_int result;
7601 if (total_bytes > len
7602 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7603 return NULL_TREE;
7605 result = double_int::from_buffer (ptr, total_bytes);
7607 return double_int_to_tree (type, result);
7611 /* Subroutine of native_interpret_expr. Interpret the contents of
7612 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7613 If the buffer cannot be interpreted, return NULL_TREE. */
7615 static tree
7616 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7618 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7619 double_int result;
7620 FIXED_VALUE_TYPE fixed_value;
7622 if (total_bytes > len
7623 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7624 return NULL_TREE;
7626 result = double_int::from_buffer (ptr, total_bytes);
7627 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7629 return build_fixed (type, fixed_value);
7633 /* Subroutine of native_interpret_expr. Interpret the contents of
7634 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7635 If the buffer cannot be interpreted, return NULL_TREE. */
7637 static tree
7638 native_interpret_real (tree type, const unsigned char *ptr, int len)
7640 enum machine_mode mode = TYPE_MODE (type);
7641 int total_bytes = GET_MODE_SIZE (mode);
7642 int byte, offset, word, words, bitpos;
7643 unsigned char value;
7644 /* There are always 32 bits in each long, no matter the size of
7645 the hosts long. We handle floating point representations with
7646 up to 192 bits. */
7647 REAL_VALUE_TYPE r;
7648 long tmp[6];
7650 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7651 if (total_bytes > len || total_bytes > 24)
7652 return NULL_TREE;
7653 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7655 memset (tmp, 0, sizeof (tmp));
7656 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7657 bitpos += BITS_PER_UNIT)
7659 byte = (bitpos / BITS_PER_UNIT) & 3;
7660 if (UNITS_PER_WORD < 4)
7662 word = byte / UNITS_PER_WORD;
7663 if (WORDS_BIG_ENDIAN)
7664 word = (words - 1) - word;
7665 offset = word * UNITS_PER_WORD;
7666 if (BYTES_BIG_ENDIAN)
7667 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7668 else
7669 offset += byte % UNITS_PER_WORD;
7671 else
7672 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7673 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7675 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7678 real_from_target (&r, tmp, mode);
7679 return build_real (type, r);
7683 /* Subroutine of native_interpret_expr. Interpret the contents of
7684 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7685 If the buffer cannot be interpreted, return NULL_TREE. */
7687 static tree
7688 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7690 tree etype, rpart, ipart;
7691 int size;
7693 etype = TREE_TYPE (type);
7694 size = GET_MODE_SIZE (TYPE_MODE (etype));
7695 if (size * 2 > len)
7696 return NULL_TREE;
7697 rpart = native_interpret_expr (etype, ptr, size);
7698 if (!rpart)
7699 return NULL_TREE;
7700 ipart = native_interpret_expr (etype, ptr+size, size);
7701 if (!ipart)
7702 return NULL_TREE;
7703 return build_complex (type, rpart, ipart);
7707 /* Subroutine of native_interpret_expr. Interpret the contents of
7708 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7709 If the buffer cannot be interpreted, return NULL_TREE. */
7711 static tree
7712 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7714 tree etype, elem;
7715 int i, size, count;
7716 tree *elements;
7718 etype = TREE_TYPE (type);
7719 size = GET_MODE_SIZE (TYPE_MODE (etype));
7720 count = TYPE_VECTOR_SUBPARTS (type);
7721 if (size * count > len)
7722 return NULL_TREE;
7724 elements = XALLOCAVEC (tree, count);
7725 for (i = count - 1; i >= 0; i--)
7727 elem = native_interpret_expr (etype, ptr+(i*size), size);
7728 if (!elem)
7729 return NULL_TREE;
7730 elements[i] = elem;
7732 return build_vector (type, elements);
7736 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7737 the buffer PTR of length LEN as a constant of type TYPE. For
7738 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7739 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7740 return NULL_TREE. */
7742 tree
7743 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7745 switch (TREE_CODE (type))
7747 case INTEGER_TYPE:
7748 case ENUMERAL_TYPE:
7749 case BOOLEAN_TYPE:
7750 case POINTER_TYPE:
7751 case REFERENCE_TYPE:
7752 return native_interpret_int (type, ptr, len);
7754 case REAL_TYPE:
7755 return native_interpret_real (type, ptr, len);
7757 case FIXED_POINT_TYPE:
7758 return native_interpret_fixed (type, ptr, len);
7760 case COMPLEX_TYPE:
7761 return native_interpret_complex (type, ptr, len);
7763 case VECTOR_TYPE:
7764 return native_interpret_vector (type, ptr, len);
7766 default:
7767 return NULL_TREE;
7771 /* Returns true if we can interpret the contents of a native encoding
7772 as TYPE. */
7774 static bool
7775 can_native_interpret_type_p (tree type)
7777 switch (TREE_CODE (type))
7779 case INTEGER_TYPE:
7780 case ENUMERAL_TYPE:
7781 case BOOLEAN_TYPE:
7782 case POINTER_TYPE:
7783 case REFERENCE_TYPE:
7784 case FIXED_POINT_TYPE:
7785 case REAL_TYPE:
7786 case COMPLEX_TYPE:
7787 case VECTOR_TYPE:
7788 return true;
7789 default:
7790 return false;
7794 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7795 TYPE at compile-time. If we're unable to perform the conversion
7796 return NULL_TREE. */
7798 static tree
7799 fold_view_convert_expr (tree type, tree expr)
7801 /* We support up to 512-bit values (for V8DFmode). */
7802 unsigned char buffer[64];
7803 int len;
7805 /* Check that the host and target are sane. */
7806 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7807 return NULL_TREE;
7809 len = native_encode_expr (expr, buffer, sizeof (buffer));
7810 if (len == 0)
7811 return NULL_TREE;
7813 return native_interpret_expr (type, buffer, len);
7816 /* Build an expression for the address of T. Folds away INDIRECT_REF
7817 to avoid confusing the gimplify process. */
7819 tree
7820 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7822 /* The size of the object is not relevant when talking about its address. */
7823 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7824 t = TREE_OPERAND (t, 0);
7826 if (TREE_CODE (t) == INDIRECT_REF)
7828 t = TREE_OPERAND (t, 0);
7830 if (TREE_TYPE (t) != ptrtype)
7831 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7833 else if (TREE_CODE (t) == MEM_REF
7834 && integer_zerop (TREE_OPERAND (t, 1)))
7835 return TREE_OPERAND (t, 0);
7836 else if (TREE_CODE (t) == MEM_REF
7837 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7838 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7839 TREE_OPERAND (t, 0),
7840 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7841 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7843 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7845 if (TREE_TYPE (t) != ptrtype)
7846 t = fold_convert_loc (loc, ptrtype, t);
7848 else
7849 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7851 return t;
7854 /* Build an expression for the address of T. */
7856 tree
7857 build_fold_addr_expr_loc (location_t loc, tree t)
7859 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7861 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7864 static bool vec_cst_ctor_to_array (tree, tree *);
7866 /* Fold a unary expression of code CODE and type TYPE with operand
7867 OP0. Return the folded expression if folding is successful.
7868 Otherwise, return NULL_TREE. */
7870 static tree
7871 fold_unary_loc_1 (location_t loc, enum tree_code code, tree type, tree op0)
7873 tree tem;
7874 tree arg0;
7875 enum tree_code_class kind = TREE_CODE_CLASS (code);
7877 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7878 && TREE_CODE_LENGTH (code) == 1);
7880 arg0 = op0;
7881 if (arg0)
7883 if (CONVERT_EXPR_CODE_P (code)
7884 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7886 /* Don't use STRIP_NOPS, because signedness of argument type
7887 matters. */
7888 STRIP_SIGN_NOPS (arg0);
7890 else
7892 /* Strip any conversions that don't change the mode. This
7893 is safe for every expression, except for a comparison
7894 expression because its signedness is derived from its
7895 operands.
7897 Note that this is done as an internal manipulation within
7898 the constant folder, in order to find the simplest
7899 representation of the arguments so that their form can be
7900 studied. In any cases, the appropriate type conversions
7901 should be put back in the tree that will get out of the
7902 constant folder. */
7903 STRIP_NOPS (arg0);
7907 if (TREE_CODE_CLASS (code) == tcc_unary)
7909 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7910 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7911 fold_build1_loc (loc, code, type,
7912 fold_convert_loc (loc, TREE_TYPE (op0),
7913 TREE_OPERAND (arg0, 1))));
7914 else if (TREE_CODE (arg0) == COND_EXPR)
7916 tree arg01 = TREE_OPERAND (arg0, 1);
7917 tree arg02 = TREE_OPERAND (arg0, 2);
7918 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7919 arg01 = fold_build1_loc (loc, code, type,
7920 fold_convert_loc (loc,
7921 TREE_TYPE (op0), arg01));
7922 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7923 arg02 = fold_build1_loc (loc, code, type,
7924 fold_convert_loc (loc,
7925 TREE_TYPE (op0), arg02));
7926 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7927 arg01, arg02);
7929 /* If this was a conversion, and all we did was to move into
7930 inside the COND_EXPR, bring it back out. But leave it if
7931 it is a conversion from integer to integer and the
7932 result precision is no wider than a word since such a
7933 conversion is cheap and may be optimized away by combine,
7934 while it couldn't if it were outside the COND_EXPR. Then return
7935 so we don't get into an infinite recursion loop taking the
7936 conversion out and then back in. */
7938 if ((CONVERT_EXPR_CODE_P (code)
7939 || code == NON_LVALUE_EXPR)
7940 && TREE_CODE (tem) == COND_EXPR
7941 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7942 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7943 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7944 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7945 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7946 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7947 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7948 && (INTEGRAL_TYPE_P
7949 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7950 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7951 || flag_syntax_only))
7952 tem = build1_loc (loc, code, type,
7953 build3 (COND_EXPR,
7954 TREE_TYPE (TREE_OPERAND
7955 (TREE_OPERAND (tem, 1), 0)),
7956 TREE_OPERAND (tem, 0),
7957 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7958 TREE_OPERAND (TREE_OPERAND (tem, 2),
7959 0)));
7960 return tem;
7964 switch (code)
7966 case PAREN_EXPR:
7967 /* Re-association barriers around constants and other re-association
7968 barriers can be removed. */
7969 if (CONSTANT_CLASS_P (op0)
7970 || TREE_CODE (op0) == PAREN_EXPR)
7971 return fold_convert_loc (loc, type, op0);
7972 return NULL_TREE;
7974 CASE_CONVERT:
7975 case FLOAT_EXPR:
7976 case FIX_TRUNC_EXPR:
7977 if (TREE_TYPE (op0) == type)
7978 return op0;
7980 if (COMPARISON_CLASS_P (op0))
7982 /* If we have (type) (a CMP b) and type is an integral type, return
7983 new expression involving the new type. Canonicalize
7984 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7985 non-integral type.
7986 Do not fold the result as that would not simplify further, also
7987 folding again results in recursions. */
7988 if (TREE_CODE (type) == BOOLEAN_TYPE)
7989 return build2_loc (loc, TREE_CODE (op0), type,
7990 TREE_OPERAND (op0, 0),
7991 TREE_OPERAND (op0, 1));
7992 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7993 && TREE_CODE (type) != VECTOR_TYPE)
7994 return build3_loc (loc, COND_EXPR, type, op0,
7995 constant_boolean_node (true, type),
7996 constant_boolean_node (false, type));
7999 /* Handle cases of two conversions in a row. */
8000 if (CONVERT_EXPR_P (op0))
8002 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8003 tree inter_type = TREE_TYPE (op0);
8004 int inside_int = INTEGRAL_TYPE_P (inside_type);
8005 int inside_ptr = POINTER_TYPE_P (inside_type);
8006 int inside_float = FLOAT_TYPE_P (inside_type);
8007 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8008 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8009 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8010 int inter_int = INTEGRAL_TYPE_P (inter_type);
8011 int inter_ptr = POINTER_TYPE_P (inter_type);
8012 int inter_float = FLOAT_TYPE_P (inter_type);
8013 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8014 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8015 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8016 int final_int = INTEGRAL_TYPE_P (type);
8017 int final_ptr = POINTER_TYPE_P (type);
8018 int final_float = FLOAT_TYPE_P (type);
8019 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8020 unsigned int final_prec = TYPE_PRECISION (type);
8021 int final_unsignedp = TYPE_UNSIGNED (type);
8023 /* In addition to the cases of two conversions in a row
8024 handled below, if we are converting something to its own
8025 type via an object of identical or wider precision, neither
8026 conversion is needed. */
8027 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8028 && (((inter_int || inter_ptr) && final_int)
8029 || (inter_float && final_float))
8030 && inter_prec >= final_prec)
8031 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8033 /* Likewise, if the intermediate and initial types are either both
8034 float or both integer, we don't need the middle conversion if the
8035 former is wider than the latter and doesn't change the signedness
8036 (for integers). Avoid this if the final type is a pointer since
8037 then we sometimes need the middle conversion. Likewise if the
8038 final type has a precision not equal to the size of its mode. */
8039 if (((inter_int && inside_int)
8040 || (inter_float && inside_float)
8041 || (inter_vec && inside_vec))
8042 && inter_prec >= inside_prec
8043 && (inter_float || inter_vec
8044 || inter_unsignedp == inside_unsignedp)
8045 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8046 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8047 && ! final_ptr
8048 && (! final_vec || inter_prec == inside_prec))
8049 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8051 /* If we have a sign-extension of a zero-extended value, we can
8052 replace that by a single zero-extension. Likewise if the
8053 final conversion does not change precision we can drop the
8054 intermediate conversion. */
8055 if (inside_int && inter_int && final_int
8056 && ((inside_prec < inter_prec && inter_prec < final_prec
8057 && inside_unsignedp && !inter_unsignedp)
8058 || final_prec == inter_prec))
8059 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8061 /* Two conversions in a row are not needed unless:
8062 - some conversion is floating-point (overstrict for now), or
8063 - some conversion is a vector (overstrict for now), or
8064 - the intermediate type is narrower than both initial and
8065 final, or
8066 - the intermediate type and innermost type differ in signedness,
8067 and the outermost type is wider than the intermediate, or
8068 - the initial type is a pointer type and the precisions of the
8069 intermediate and final types differ, or
8070 - the final type is a pointer type and the precisions of the
8071 initial and intermediate types differ. */
8072 if (! inside_float && ! inter_float && ! final_float
8073 && ! inside_vec && ! inter_vec && ! final_vec
8074 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8075 && ! (inside_int && inter_int
8076 && inter_unsignedp != inside_unsignedp
8077 && inter_prec < final_prec)
8078 && ((inter_unsignedp && inter_prec > inside_prec)
8079 == (final_unsignedp && final_prec > inter_prec))
8080 && ! (inside_ptr && inter_prec != final_prec)
8081 && ! (final_ptr && inside_prec != inter_prec)
8082 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8083 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8084 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8087 /* Handle (T *)&A.B.C for A being of type T and B and C
8088 living at offset zero. This occurs frequently in
8089 C++ upcasting and then accessing the base. */
8090 if (TREE_CODE (op0) == ADDR_EXPR
8091 && POINTER_TYPE_P (type)
8092 && handled_component_p (TREE_OPERAND (op0, 0)))
8094 HOST_WIDE_INT bitsize, bitpos;
8095 tree offset;
8096 enum machine_mode mode;
8097 int unsignedp, volatilep;
8098 tree base = TREE_OPERAND (op0, 0);
8099 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8100 &mode, &unsignedp, &volatilep, false);
8101 /* If the reference was to a (constant) zero offset, we can use
8102 the address of the base if it has the same base type
8103 as the result type and the pointer type is unqualified. */
8104 if (! offset && bitpos == 0
8105 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8106 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8107 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8108 return fold_convert_loc (loc, type,
8109 build_fold_addr_expr_loc (loc, base));
8112 if (TREE_CODE (op0) == MODIFY_EXPR
8113 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8114 /* Detect assigning a bitfield. */
8115 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8116 && DECL_BIT_FIELD
8117 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8119 /* Don't leave an assignment inside a conversion
8120 unless assigning a bitfield. */
8121 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8122 /* First do the assignment, then return converted constant. */
8123 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8124 TREE_NO_WARNING (tem) = 1;
8125 TREE_USED (tem) = 1;
8126 return tem;
8129 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8130 constants (if x has signed type, the sign bit cannot be set
8131 in c). This folds extension into the BIT_AND_EXPR.
8132 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8133 very likely don't have maximal range for their precision and this
8134 transformation effectively doesn't preserve non-maximal ranges. */
8135 if (TREE_CODE (type) == INTEGER_TYPE
8136 && TREE_CODE (op0) == BIT_AND_EXPR
8137 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8139 tree and_expr = op0;
8140 tree and0 = TREE_OPERAND (and_expr, 0);
8141 tree and1 = TREE_OPERAND (and_expr, 1);
8142 int change = 0;
8144 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8145 || (TYPE_PRECISION (type)
8146 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8147 change = 1;
8148 else if (TYPE_PRECISION (TREE_TYPE (and1))
8149 <= HOST_BITS_PER_WIDE_INT
8150 && tree_fits_uhwi_p (and1))
8152 unsigned HOST_WIDE_INT cst;
8154 cst = tree_to_uhwi (and1);
8155 cst &= HOST_WIDE_INT_M1U
8156 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8157 change = (cst == 0);
8158 #ifdef LOAD_EXTEND_OP
8159 if (change
8160 && !flag_syntax_only
8161 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8162 == ZERO_EXTEND))
8164 tree uns = unsigned_type_for (TREE_TYPE (and0));
8165 and0 = fold_convert_loc (loc, uns, and0);
8166 and1 = fold_convert_loc (loc, uns, and1);
8168 #endif
8170 if (change)
8172 tem = force_fit_type_double (type, tree_to_double_int (and1),
8173 0, TREE_OVERFLOW (and1));
8174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8175 fold_convert_loc (loc, type, and0), tem);
8179 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8180 when one of the new casts will fold away. Conservatively we assume
8181 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8182 if (POINTER_TYPE_P (type)
8183 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8184 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8185 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8186 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8187 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8189 tree arg00 = TREE_OPERAND (arg0, 0);
8190 tree arg01 = TREE_OPERAND (arg0, 1);
8192 return fold_build_pointer_plus_loc
8193 (loc, fold_convert_loc (loc, type, arg00), arg01);
8196 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8197 of the same precision, and X is an integer type not narrower than
8198 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8199 if (INTEGRAL_TYPE_P (type)
8200 && TREE_CODE (op0) == BIT_NOT_EXPR
8201 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8202 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8203 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8205 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8206 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8207 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8208 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8209 fold_convert_loc (loc, type, tem));
8212 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8213 type of X and Y (integer types only). */
8214 if (INTEGRAL_TYPE_P (type)
8215 && TREE_CODE (op0) == MULT_EXPR
8216 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8217 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8219 /* Be careful not to introduce new overflows. */
8220 tree mult_type;
8221 if (TYPE_OVERFLOW_WRAPS (type))
8222 mult_type = type;
8223 else
8224 mult_type = unsigned_type_for (type);
8226 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8228 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8229 fold_convert_loc (loc, mult_type,
8230 TREE_OPERAND (op0, 0)),
8231 fold_convert_loc (loc, mult_type,
8232 TREE_OPERAND (op0, 1)));
8233 return fold_convert_loc (loc, type, tem);
8237 tem = fold_convert_const (code, type, op0);
8238 return tem ? tem : NULL_TREE;
8240 case ADDR_SPACE_CONVERT_EXPR:
8241 if (integer_zerop (arg0))
8242 return fold_convert_const (code, type, arg0);
8243 return NULL_TREE;
8245 case FIXED_CONVERT_EXPR:
8246 tem = fold_convert_const (code, type, arg0);
8247 return tem ? tem : NULL_TREE;
8249 case VIEW_CONVERT_EXPR:
8250 if (TREE_TYPE (op0) == type)
8251 return op0;
8252 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8253 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8254 type, TREE_OPERAND (op0, 0));
8255 if (TREE_CODE (op0) == MEM_REF)
8256 return fold_build2_loc (loc, MEM_REF, type,
8257 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8259 /* For integral conversions with the same precision or pointer
8260 conversions use a NOP_EXPR instead. */
8261 if ((INTEGRAL_TYPE_P (type)
8262 || POINTER_TYPE_P (type))
8263 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8264 || POINTER_TYPE_P (TREE_TYPE (op0)))
8265 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8266 return fold_convert_loc (loc, type, op0);
8268 /* Strip inner integral conversions that do not change the precision. */
8269 if (CONVERT_EXPR_P (op0)
8270 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8271 || POINTER_TYPE_P (TREE_TYPE (op0)))
8272 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8273 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8274 && (TYPE_PRECISION (TREE_TYPE (op0))
8275 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8276 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8277 type, TREE_OPERAND (op0, 0));
8279 return fold_view_convert_expr (type, op0);
8281 case NEGATE_EXPR:
8282 tem = fold_negate_expr (loc, arg0);
8283 if (tem)
8284 return fold_convert_loc (loc, type, tem);
8285 return NULL_TREE;
8287 case ABS_EXPR:
8288 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8289 return fold_abs_const (arg0, type);
8290 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8291 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8292 /* Convert fabs((double)float) into (double)fabsf(float). */
8293 else if (TREE_CODE (arg0) == NOP_EXPR
8294 && TREE_CODE (type) == REAL_TYPE)
8296 tree targ0 = strip_float_extensions (arg0);
8297 if (targ0 != arg0)
8298 return fold_convert_loc (loc, type,
8299 fold_build1_loc (loc, ABS_EXPR,
8300 TREE_TYPE (targ0),
8301 targ0));
8303 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8304 else if (TREE_CODE (arg0) == ABS_EXPR)
8305 return arg0;
8306 else if (tree_expr_nonnegative_p (arg0))
8307 return arg0;
8309 /* Strip sign ops from argument. */
8310 if (TREE_CODE (type) == REAL_TYPE)
8312 tem = fold_strip_sign_ops (arg0);
8313 if (tem)
8314 return fold_build1_loc (loc, ABS_EXPR, type,
8315 fold_convert_loc (loc, type, tem));
8317 return NULL_TREE;
8319 case CONJ_EXPR:
8320 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8321 return fold_convert_loc (loc, type, arg0);
8322 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8324 tree itype = TREE_TYPE (type);
8325 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8326 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8328 negate_expr (ipart));
8330 if (TREE_CODE (arg0) == COMPLEX_CST)
8332 tree itype = TREE_TYPE (type);
8333 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8334 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8335 return build_complex (type, rpart, negate_expr (ipart));
8337 if (TREE_CODE (arg0) == CONJ_EXPR)
8338 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8339 return NULL_TREE;
8341 case BIT_NOT_EXPR:
8342 if (TREE_CODE (arg0) == INTEGER_CST)
8343 return fold_not_const (arg0, type);
8344 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8345 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8346 /* Convert ~ (-A) to A - 1. */
8347 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8348 return fold_build2_loc (loc, MINUS_EXPR, type,
8349 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8350 build_int_cst (type, 1));
8351 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8352 else if (INTEGRAL_TYPE_P (type)
8353 && ((TREE_CODE (arg0) == MINUS_EXPR
8354 && integer_onep (TREE_OPERAND (arg0, 1)))
8355 || (TREE_CODE (arg0) == PLUS_EXPR
8356 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8357 return fold_build1_loc (loc, NEGATE_EXPR, type,
8358 fold_convert_loc (loc, type,
8359 TREE_OPERAND (arg0, 0)));
8360 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8361 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8362 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8363 fold_convert_loc (loc, type,
8364 TREE_OPERAND (arg0, 0)))))
8365 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8366 fold_convert_loc (loc, type,
8367 TREE_OPERAND (arg0, 1)));
8368 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8369 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8370 fold_convert_loc (loc, type,
8371 TREE_OPERAND (arg0, 1)))))
8372 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8373 fold_convert_loc (loc, type,
8374 TREE_OPERAND (arg0, 0)), tem);
8375 /* Perform BIT_NOT_EXPR on each element individually. */
8376 else if (TREE_CODE (arg0) == VECTOR_CST)
8378 tree *elements;
8379 tree elem;
8380 unsigned count = VECTOR_CST_NELTS (arg0), i;
8382 elements = XALLOCAVEC (tree, count);
8383 for (i = 0; i < count; i++)
8385 elem = VECTOR_CST_ELT (arg0, i);
8386 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8387 if (elem == NULL_TREE)
8388 break;
8389 elements[i] = elem;
8391 if (i == count)
8392 return build_vector (type, elements);
8394 else if (COMPARISON_CLASS_P (arg0)
8395 && (VECTOR_TYPE_P (type)
8396 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8398 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8399 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8400 HONOR_NANS (TYPE_MODE (op_type)));
8401 if (subcode != ERROR_MARK)
8402 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8403 TREE_OPERAND (arg0, 1));
8407 return NULL_TREE;
8409 case TRUTH_NOT_EXPR:
8410 /* Note that the operand of this must be an int
8411 and its values must be 0 or 1.
8412 ("true" is a fixed value perhaps depending on the language,
8413 but we don't handle values other than 1 correctly yet.) */
8414 tem = fold_truth_not_expr (loc, arg0);
8415 if (!tem)
8416 return NULL_TREE;
8417 return fold_convert_loc (loc, type, tem);
8419 case REALPART_EXPR:
8420 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8421 return fold_convert_loc (loc, type, arg0);
8422 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8423 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8424 TREE_OPERAND (arg0, 1));
8425 if (TREE_CODE (arg0) == COMPLEX_CST)
8426 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8427 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8429 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8430 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8431 fold_build1_loc (loc, REALPART_EXPR, itype,
8432 TREE_OPERAND (arg0, 0)),
8433 fold_build1_loc (loc, REALPART_EXPR, itype,
8434 TREE_OPERAND (arg0, 1)));
8435 return fold_convert_loc (loc, type, tem);
8437 if (TREE_CODE (arg0) == CONJ_EXPR)
8439 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8440 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8441 TREE_OPERAND (arg0, 0));
8442 return fold_convert_loc (loc, type, tem);
8444 if (TREE_CODE (arg0) == CALL_EXPR)
8446 tree fn = get_callee_fndecl (arg0);
8447 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8448 switch (DECL_FUNCTION_CODE (fn))
8450 CASE_FLT_FN (BUILT_IN_CEXPI):
8451 fn = mathfn_built_in (type, BUILT_IN_COS);
8452 if (fn)
8453 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8454 break;
8456 default:
8457 break;
8460 return NULL_TREE;
8462 case IMAGPART_EXPR:
8463 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8464 return build_zero_cst (type);
8465 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8466 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8467 TREE_OPERAND (arg0, 0));
8468 if (TREE_CODE (arg0) == COMPLEX_CST)
8469 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8470 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8472 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8473 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8474 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8475 TREE_OPERAND (arg0, 0)),
8476 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8477 TREE_OPERAND (arg0, 1)));
8478 return fold_convert_loc (loc, type, tem);
8480 if (TREE_CODE (arg0) == CONJ_EXPR)
8482 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8483 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8484 return fold_convert_loc (loc, type, negate_expr (tem));
8486 if (TREE_CODE (arg0) == CALL_EXPR)
8488 tree fn = get_callee_fndecl (arg0);
8489 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8490 switch (DECL_FUNCTION_CODE (fn))
8492 CASE_FLT_FN (BUILT_IN_CEXPI):
8493 fn = mathfn_built_in (type, BUILT_IN_SIN);
8494 if (fn)
8495 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8496 break;
8498 default:
8499 break;
8502 return NULL_TREE;
8504 case INDIRECT_REF:
8505 /* Fold *&X to X if X is an lvalue. */
8506 if (TREE_CODE (op0) == ADDR_EXPR)
8508 tree op00 = TREE_OPERAND (op0, 0);
8509 if ((TREE_CODE (op00) == VAR_DECL
8510 || TREE_CODE (op00) == PARM_DECL
8511 || TREE_CODE (op00) == RESULT_DECL)
8512 && !TREE_READONLY (op00))
8513 return op00;
8515 return NULL_TREE;
8517 case VEC_UNPACK_LO_EXPR:
8518 case VEC_UNPACK_HI_EXPR:
8519 case VEC_UNPACK_FLOAT_LO_EXPR:
8520 case VEC_UNPACK_FLOAT_HI_EXPR:
8522 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8523 tree *elts;
8524 enum tree_code subcode;
8526 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8527 if (TREE_CODE (arg0) != VECTOR_CST)
8528 return NULL_TREE;
8530 elts = XALLOCAVEC (tree, nelts * 2);
8531 if (!vec_cst_ctor_to_array (arg0, elts))
8532 return NULL_TREE;
8534 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8535 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8536 elts += nelts;
8538 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8539 subcode = NOP_EXPR;
8540 else
8541 subcode = FLOAT_EXPR;
8543 for (i = 0; i < nelts; i++)
8545 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8546 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8547 return NULL_TREE;
8550 return build_vector (type, elts);
8553 case REDUC_MIN_EXPR:
8554 case REDUC_MAX_EXPR:
8555 case REDUC_PLUS_EXPR:
8557 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8558 tree *elts;
8559 enum tree_code subcode;
8561 if (TREE_CODE (op0) != VECTOR_CST)
8562 return NULL_TREE;
8564 elts = XALLOCAVEC (tree, nelts);
8565 if (!vec_cst_ctor_to_array (op0, elts))
8566 return NULL_TREE;
8568 switch (code)
8570 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8571 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8572 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8573 default: gcc_unreachable ();
8576 for (i = 1; i < nelts; i++)
8578 elts[0] = const_binop (subcode, elts[0], elts[i]);
8579 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8580 return NULL_TREE;
8581 elts[i] = build_zero_cst (TREE_TYPE (type));
8584 return build_vector (type, elts);
8587 default:
8588 return NULL_TREE;
8589 } /* switch (code) */
8592 /* Given an expression tree EXP, set the EXPR_FOLDED flag, and if it is
8593 a nop, recursively set the EXPR_FOLDED flag of its operand. */
8595 static void
8596 set_expr_folded_flag (tree exp)
8598 /* FIXME -- can not set the flag on SSA_NAME, the flag overlaps
8599 with the version member. */
8600 if (TREE_CODE (exp) == SSA_NAME)
8601 return;
8603 EXPR_FOLDED (exp) = 1;
8605 /* If EXP is a nop (i.e. NON_LVALUE_EXPRs and NOP_EXPRs), we need to
8606 recursively set the EXPR_FOLDED flag of its operand because the
8607 expression will be stripped later. */
8608 while ((CONVERT_EXPR_P (exp)
8609 || TREE_CODE (exp) == NON_LVALUE_EXPR)
8610 && TREE_OPERAND (exp, 0) != error_mark_node)
8612 exp = TREE_OPERAND (exp, 0);
8613 /* FIXME -- can not set the flag on SSA_NAME, the flag overlaps
8614 with the version member. */
8615 if (TREE_CODE (exp) != SSA_NAME)
8616 EXPR_FOLDED (exp) = 1;
8620 /* Fold a unary expression of code CODE and type TYPE with operand
8621 OP0. Return the folded expression if folding is successful.
8622 Otherwise, return NULL_TREE.
8623 This is a wrapper around fold_unary_1 function (which does the
8624 actual folding). Set the EXPR_FOLDED flag of the folded expression
8625 if folding is successful. */
8627 tree
8628 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8630 tree tem = fold_unary_loc_1 (loc, code, type, op0);
8631 if (tem)
8632 set_expr_folded_flag (tem);
8633 return tem;
8636 /* If the operation was a conversion do _not_ mark a resulting constant
8637 with TREE_OVERFLOW if the original constant was not. These conversions
8638 have implementation defined behavior and retaining the TREE_OVERFLOW
8639 flag here would confuse later passes such as VRP. */
8640 tree
8641 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8642 tree type, tree op0)
8644 tree res = fold_unary_loc (loc, code, type, op0);
8645 if (res
8646 && TREE_CODE (res) == INTEGER_CST
8647 && TREE_CODE (op0) == INTEGER_CST
8648 && CONVERT_EXPR_CODE_P (code))
8649 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8651 return res;
8654 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8655 operands OP0 and OP1. LOC is the location of the resulting expression.
8656 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8657 Return the folded expression if folding is successful. Otherwise,
8658 return NULL_TREE. */
8659 static tree
8660 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8661 tree arg0, tree arg1, tree op0, tree op1)
8663 tree tem;
8665 /* We only do these simplifications if we are optimizing. */
8666 if (!optimize)
8667 return NULL_TREE;
8669 /* Check for things like (A || B) && (A || C). We can convert this
8670 to A || (B && C). Note that either operator can be any of the four
8671 truth and/or operations and the transformation will still be
8672 valid. Also note that we only care about order for the
8673 ANDIF and ORIF operators. If B contains side effects, this
8674 might change the truth-value of A. */
8675 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8676 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8677 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8678 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8679 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8680 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8682 tree a00 = TREE_OPERAND (arg0, 0);
8683 tree a01 = TREE_OPERAND (arg0, 1);
8684 tree a10 = TREE_OPERAND (arg1, 0);
8685 tree a11 = TREE_OPERAND (arg1, 1);
8686 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8687 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8688 && (code == TRUTH_AND_EXPR
8689 || code == TRUTH_OR_EXPR));
8691 if (operand_equal_p (a00, a10, 0))
8692 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8693 fold_build2_loc (loc, code, type, a01, a11));
8694 else if (commutative && operand_equal_p (a00, a11, 0))
8695 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8696 fold_build2_loc (loc, code, type, a01, a10));
8697 else if (commutative && operand_equal_p (a01, a10, 0))
8698 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8699 fold_build2_loc (loc, code, type, a00, a11));
8701 /* This case if tricky because we must either have commutative
8702 operators or else A10 must not have side-effects. */
8704 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8705 && operand_equal_p (a01, a11, 0))
8706 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8707 fold_build2_loc (loc, code, type, a00, a10),
8708 a01);
8711 /* See if we can build a range comparison. */
8712 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8713 return tem;
8715 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8716 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8718 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8719 if (tem)
8720 return fold_build2_loc (loc, code, type, tem, arg1);
8723 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8724 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8726 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8727 if (tem)
8728 return fold_build2_loc (loc, code, type, arg0, tem);
8731 /* Check for the possibility of merging component references. If our
8732 lhs is another similar operation, try to merge its rhs with our
8733 rhs. Then try to merge our lhs and rhs. */
8734 if (TREE_CODE (arg0) == code
8735 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8736 TREE_OPERAND (arg0, 1), arg1)))
8737 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8739 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8740 return tem;
8742 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8743 && (code == TRUTH_AND_EXPR
8744 || code == TRUTH_ANDIF_EXPR
8745 || code == TRUTH_OR_EXPR
8746 || code == TRUTH_ORIF_EXPR))
8748 enum tree_code ncode, icode;
8750 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8751 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8752 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8754 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8755 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8756 We don't want to pack more than two leafs to a non-IF AND/OR
8757 expression.
8758 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8759 equal to IF-CODE, then we don't want to add right-hand operand.
8760 If the inner right-hand side of left-hand operand has
8761 side-effects, or isn't simple, then we can't add to it,
8762 as otherwise we might destroy if-sequence. */
8763 if (TREE_CODE (arg0) == icode
8764 && simple_operand_p_2 (arg1)
8765 /* Needed for sequence points to handle trappings, and
8766 side-effects. */
8767 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8769 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8770 arg1);
8771 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8772 tem);
8774 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8775 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8776 else if (TREE_CODE (arg1) == icode
8777 && simple_operand_p_2 (arg0)
8778 /* Needed for sequence points to handle trappings, and
8779 side-effects. */
8780 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8782 tem = fold_build2_loc (loc, ncode, type,
8783 arg0, TREE_OPERAND (arg1, 0));
8784 return fold_build2_loc (loc, icode, type, tem,
8785 TREE_OPERAND (arg1, 1));
8787 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8788 into (A OR B).
8789 For sequence point consistancy, we need to check for trapping,
8790 and side-effects. */
8791 else if (code == icode && simple_operand_p_2 (arg0)
8792 && simple_operand_p_2 (arg1))
8793 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8796 return NULL_TREE;
8799 /* Fold a binary expression of code CODE and type TYPE with operands
8800 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8801 Return the folded expression if folding is successful. Otherwise,
8802 return NULL_TREE. */
8804 static tree
8805 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8807 enum tree_code compl_code;
8809 if (code == MIN_EXPR)
8810 compl_code = MAX_EXPR;
8811 else if (code == MAX_EXPR)
8812 compl_code = MIN_EXPR;
8813 else
8814 gcc_unreachable ();
8816 /* MIN (MAX (a, b), b) == b. */
8817 if (TREE_CODE (op0) == compl_code
8818 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8819 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8821 /* MIN (MAX (b, a), b) == b. */
8822 if (TREE_CODE (op0) == compl_code
8823 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8824 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8825 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8827 /* MIN (a, MAX (a, b)) == a. */
8828 if (TREE_CODE (op1) == compl_code
8829 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8830 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8831 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8833 /* MIN (a, MAX (b, a)) == a. */
8834 if (TREE_CODE (op1) == compl_code
8835 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8836 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8837 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8839 return NULL_TREE;
8842 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8843 by changing CODE to reduce the magnitude of constants involved in
8844 ARG0 of the comparison.
8845 Returns a canonicalized comparison tree if a simplification was
8846 possible, otherwise returns NULL_TREE.
8847 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8848 valid if signed overflow is undefined. */
8850 static tree
8851 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8852 tree arg0, tree arg1,
8853 bool *strict_overflow_p)
8855 enum tree_code code0 = TREE_CODE (arg0);
8856 tree t, cst0 = NULL_TREE;
8857 int sgn0;
8858 bool swap = false;
8860 /* Match A +- CST code arg1 and CST code arg1. We can change the
8861 first form only if overflow is undefined. */
8862 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8863 /* In principle pointers also have undefined overflow behavior,
8864 but that causes problems elsewhere. */
8865 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8866 && (code0 == MINUS_EXPR
8867 || code0 == PLUS_EXPR)
8868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8869 || code0 == INTEGER_CST))
8870 return NULL_TREE;
8872 /* Identify the constant in arg0 and its sign. */
8873 if (code0 == INTEGER_CST)
8874 cst0 = arg0;
8875 else
8876 cst0 = TREE_OPERAND (arg0, 1);
8877 sgn0 = tree_int_cst_sgn (cst0);
8879 /* Overflowed constants and zero will cause problems. */
8880 if (integer_zerop (cst0)
8881 || TREE_OVERFLOW (cst0))
8882 return NULL_TREE;
8884 /* See if we can reduce the magnitude of the constant in
8885 arg0 by changing the comparison code. */
8886 if (code0 == INTEGER_CST)
8888 /* CST <= arg1 -> CST-1 < arg1. */
8889 if (code == LE_EXPR && sgn0 == 1)
8890 code = LT_EXPR;
8891 /* -CST < arg1 -> -CST-1 <= arg1. */
8892 else if (code == LT_EXPR && sgn0 == -1)
8893 code = LE_EXPR;
8894 /* CST > arg1 -> CST-1 >= arg1. */
8895 else if (code == GT_EXPR && sgn0 == 1)
8896 code = GE_EXPR;
8897 /* -CST >= arg1 -> -CST-1 > arg1. */
8898 else if (code == GE_EXPR && sgn0 == -1)
8899 code = GT_EXPR;
8900 else
8901 return NULL_TREE;
8902 /* arg1 code' CST' might be more canonical. */
8903 swap = true;
8905 else
8907 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8908 if (code == LT_EXPR
8909 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8910 code = LE_EXPR;
8911 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8912 else if (code == GT_EXPR
8913 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8914 code = GE_EXPR;
8915 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8916 else if (code == LE_EXPR
8917 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8918 code = LT_EXPR;
8919 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8920 else if (code == GE_EXPR
8921 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8922 code = GT_EXPR;
8923 else
8924 return NULL_TREE;
8925 *strict_overflow_p = true;
8928 /* Now build the constant reduced in magnitude. But not if that
8929 would produce one outside of its types range. */
8930 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8931 && ((sgn0 == 1
8932 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8933 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8934 || (sgn0 == -1
8935 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8936 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8937 /* We cannot swap the comparison here as that would cause us to
8938 endlessly recurse. */
8939 return NULL_TREE;
8941 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8942 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8943 if (code0 != INTEGER_CST)
8944 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8945 t = fold_convert (TREE_TYPE (arg1), t);
8947 /* If swapping might yield to a more canonical form, do so. */
8948 if (swap)
8949 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8950 else
8951 return fold_build2_loc (loc, code, type, t, arg1);
8954 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8955 overflow further. Try to decrease the magnitude of constants involved
8956 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8957 and put sole constants at the second argument position.
8958 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8960 static tree
8961 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8962 tree arg0, tree arg1)
8964 tree t;
8965 bool strict_overflow_p;
8966 const char * const warnmsg = G_("assuming signed overflow does not occur "
8967 "when reducing constant in comparison");
8969 /* Try canonicalization by simplifying arg0. */
8970 strict_overflow_p = false;
8971 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8972 &strict_overflow_p);
8973 if (t)
8975 if (strict_overflow_p)
8976 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8977 return t;
8980 /* Try canonicalization by simplifying arg1 using the swapped
8981 comparison. */
8982 code = swap_tree_comparison (code);
8983 strict_overflow_p = false;
8984 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8985 &strict_overflow_p);
8986 if (t && strict_overflow_p)
8987 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8988 return t;
8991 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8992 space. This is used to avoid issuing overflow warnings for
8993 expressions like &p->x which can not wrap. */
8995 static bool
8996 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8998 double_int di_offset, total;
9000 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9001 return true;
9003 if (bitpos < 0)
9004 return true;
9006 if (offset == NULL_TREE)
9007 di_offset = double_int_zero;
9008 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9009 return true;
9010 else
9011 di_offset = TREE_INT_CST (offset);
9013 bool overflow;
9014 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
9015 total = di_offset.add_with_sign (units, true, &overflow);
9016 if (overflow)
9017 return true;
9019 if (total.high != 0)
9020 return true;
9022 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9023 if (size <= 0)
9024 return true;
9026 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9027 array. */
9028 if (TREE_CODE (base) == ADDR_EXPR)
9030 HOST_WIDE_INT base_size;
9032 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9033 if (base_size > 0 && size < base_size)
9034 size = base_size;
9037 return total.low > (unsigned HOST_WIDE_INT) size;
9040 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
9041 kind INTEGER_CST. This makes sure to properly sign-extend the
9042 constant. */
9044 static HOST_WIDE_INT
9045 size_low_cst (const_tree t)
9047 double_int d = tree_to_double_int (t);
9048 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
9051 /* Subroutine of fold_binary. This routine performs all of the
9052 transformations that are common to the equality/inequality
9053 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9054 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9055 fold_binary should call fold_binary. Fold a comparison with
9056 tree code CODE and type TYPE with operands OP0 and OP1. Return
9057 the folded comparison or NULL_TREE. */
9059 static tree
9060 fold_comparison (location_t loc, enum tree_code code, tree type,
9061 tree op0, tree op1)
9063 tree arg0, arg1, tem;
9065 arg0 = op0;
9066 arg1 = op1;
9068 STRIP_SIGN_NOPS (arg0);
9069 STRIP_SIGN_NOPS (arg1);
9071 tem = fold_relational_const (code, type, arg0, arg1);
9072 if (tem != NULL_TREE)
9073 return tem;
9075 /* If one arg is a real or integer constant, put it last. */
9076 if (tree_swap_operands_p (arg0, arg1, true))
9077 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9079 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9080 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9081 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9082 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9083 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9084 && (TREE_CODE (arg1) == INTEGER_CST
9085 && !TREE_OVERFLOW (arg1)))
9087 tree const1 = TREE_OPERAND (arg0, 1);
9088 tree const2 = arg1;
9089 tree variable = TREE_OPERAND (arg0, 0);
9090 tree lhs;
9091 int lhs_add;
9092 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9094 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9095 TREE_TYPE (arg1), const2, const1);
9097 /* If the constant operation overflowed this can be
9098 simplified as a comparison against INT_MAX/INT_MIN. */
9099 if (TREE_CODE (lhs) == INTEGER_CST
9100 && TREE_OVERFLOW (lhs)
9101 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9103 int const1_sgn = tree_int_cst_sgn (const1);
9104 enum tree_code code2 = code;
9106 /* Get the sign of the constant on the lhs if the
9107 operation were VARIABLE + CONST1. */
9108 if (TREE_CODE (arg0) == MINUS_EXPR)
9109 const1_sgn = -const1_sgn;
9111 /* The sign of the constant determines if we overflowed
9112 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9113 Canonicalize to the INT_MIN overflow by swapping the comparison
9114 if necessary. */
9115 if (const1_sgn == -1)
9116 code2 = swap_tree_comparison (code);
9118 /* We now can look at the canonicalized case
9119 VARIABLE + 1 CODE2 INT_MIN
9120 and decide on the result. */
9121 if (code2 == LT_EXPR
9122 || code2 == LE_EXPR
9123 || code2 == EQ_EXPR)
9124 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9125 else if (code2 == NE_EXPR
9126 || code2 == GE_EXPR
9127 || code2 == GT_EXPR)
9128 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9131 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9132 && (TREE_CODE (lhs) != INTEGER_CST
9133 || !TREE_OVERFLOW (lhs)))
9135 if (code != EQ_EXPR && code != NE_EXPR)
9136 fold_overflow_warning ("assuming signed overflow does not occur "
9137 "when changing X +- C1 cmp C2 to "
9138 "X cmp C1 +- C2",
9139 WARN_STRICT_OVERFLOW_COMPARISON);
9140 return fold_build2_loc (loc, code, type, variable, lhs);
9144 /* For comparisons of pointers we can decompose it to a compile time
9145 comparison of the base objects and the offsets into the object.
9146 This requires at least one operand being an ADDR_EXPR or a
9147 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9148 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9149 && (TREE_CODE (arg0) == ADDR_EXPR
9150 || TREE_CODE (arg1) == ADDR_EXPR
9151 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9152 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9154 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9155 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9156 enum machine_mode mode;
9157 int volatilep, unsignedp;
9158 bool indirect_base0 = false, indirect_base1 = false;
9160 /* Get base and offset for the access. Strip ADDR_EXPR for
9161 get_inner_reference, but put it back by stripping INDIRECT_REF
9162 off the base object if possible. indirect_baseN will be true
9163 if baseN is not an address but refers to the object itself. */
9164 base0 = arg0;
9165 if (TREE_CODE (arg0) == ADDR_EXPR)
9167 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9168 &bitsize, &bitpos0, &offset0, &mode,
9169 &unsignedp, &volatilep, false);
9170 if (TREE_CODE (base0) == INDIRECT_REF)
9171 base0 = TREE_OPERAND (base0, 0);
9172 else
9173 indirect_base0 = true;
9175 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9177 base0 = TREE_OPERAND (arg0, 0);
9178 STRIP_SIGN_NOPS (base0);
9179 if (TREE_CODE (base0) == ADDR_EXPR)
9181 base0 = TREE_OPERAND (base0, 0);
9182 indirect_base0 = true;
9184 offset0 = TREE_OPERAND (arg0, 1);
9185 if (tree_fits_shwi_p (offset0))
9187 HOST_WIDE_INT off = size_low_cst (offset0);
9188 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9189 * BITS_PER_UNIT)
9190 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9192 bitpos0 = off * BITS_PER_UNIT;
9193 offset0 = NULL_TREE;
9198 base1 = arg1;
9199 if (TREE_CODE (arg1) == ADDR_EXPR)
9201 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9202 &bitsize, &bitpos1, &offset1, &mode,
9203 &unsignedp, &volatilep, false);
9204 if (TREE_CODE (base1) == INDIRECT_REF)
9205 base1 = TREE_OPERAND (base1, 0);
9206 else
9207 indirect_base1 = true;
9209 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9211 base1 = TREE_OPERAND (arg1, 0);
9212 STRIP_SIGN_NOPS (base1);
9213 if (TREE_CODE (base1) == ADDR_EXPR)
9215 base1 = TREE_OPERAND (base1, 0);
9216 indirect_base1 = true;
9218 offset1 = TREE_OPERAND (arg1, 1);
9219 if (tree_fits_shwi_p (offset1))
9221 HOST_WIDE_INT off = size_low_cst (offset1);
9222 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9223 * BITS_PER_UNIT)
9224 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9226 bitpos1 = off * BITS_PER_UNIT;
9227 offset1 = NULL_TREE;
9232 /* A local variable can never be pointed to by
9233 the default SSA name of an incoming parameter. */
9234 if ((TREE_CODE (arg0) == ADDR_EXPR
9235 && indirect_base0
9236 && TREE_CODE (base0) == VAR_DECL
9237 && auto_var_in_fn_p (base0, current_function_decl)
9238 && !indirect_base1
9239 && TREE_CODE (base1) == SSA_NAME
9240 && SSA_NAME_IS_DEFAULT_DEF (base1)
9241 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9242 || (TREE_CODE (arg1) == ADDR_EXPR
9243 && indirect_base1
9244 && TREE_CODE (base1) == VAR_DECL
9245 && auto_var_in_fn_p (base1, current_function_decl)
9246 && !indirect_base0
9247 && TREE_CODE (base0) == SSA_NAME
9248 && SSA_NAME_IS_DEFAULT_DEF (base0)
9249 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9251 if (code == NE_EXPR)
9252 return constant_boolean_node (1, type);
9253 else if (code == EQ_EXPR)
9254 return constant_boolean_node (0, type);
9256 /* If we have equivalent bases we might be able to simplify. */
9257 else if (indirect_base0 == indirect_base1
9258 && operand_equal_p (base0, base1, 0))
9260 /* We can fold this expression to a constant if the non-constant
9261 offset parts are equal. */
9262 if ((offset0 == offset1
9263 || (offset0 && offset1
9264 && operand_equal_p (offset0, offset1, 0)))
9265 && (code == EQ_EXPR
9266 || code == NE_EXPR
9267 || (indirect_base0 && DECL_P (base0))
9268 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9271 if (code != EQ_EXPR
9272 && code != NE_EXPR
9273 && bitpos0 != bitpos1
9274 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9275 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9276 fold_overflow_warning (("assuming pointer wraparound does not "
9277 "occur when comparing P +- C1 with "
9278 "P +- C2"),
9279 WARN_STRICT_OVERFLOW_CONDITIONAL);
9281 switch (code)
9283 case EQ_EXPR:
9284 return constant_boolean_node (bitpos0 == bitpos1, type);
9285 case NE_EXPR:
9286 return constant_boolean_node (bitpos0 != bitpos1, type);
9287 case LT_EXPR:
9288 return constant_boolean_node (bitpos0 < bitpos1, type);
9289 case LE_EXPR:
9290 return constant_boolean_node (bitpos0 <= bitpos1, type);
9291 case GE_EXPR:
9292 return constant_boolean_node (bitpos0 >= bitpos1, type);
9293 case GT_EXPR:
9294 return constant_boolean_node (bitpos0 > bitpos1, type);
9295 default:;
9298 /* We can simplify the comparison to a comparison of the variable
9299 offset parts if the constant offset parts are equal.
9300 Be careful to use signed sizetype here because otherwise we
9301 mess with array offsets in the wrong way. This is possible
9302 because pointer arithmetic is restricted to retain within an
9303 object and overflow on pointer differences is undefined as of
9304 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9305 else if (bitpos0 == bitpos1
9306 && ((code == EQ_EXPR || code == NE_EXPR)
9307 || (indirect_base0 && DECL_P (base0))
9308 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9310 /* By converting to signed sizetype we cover middle-end pointer
9311 arithmetic which operates on unsigned pointer types of size
9312 type size and ARRAY_REF offsets which are properly sign or
9313 zero extended from their type in case it is narrower than
9314 sizetype. */
9315 if (offset0 == NULL_TREE)
9316 offset0 = build_int_cst (ssizetype, 0);
9317 else
9318 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9319 if (offset1 == NULL_TREE)
9320 offset1 = build_int_cst (ssizetype, 0);
9321 else
9322 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9324 if (code != EQ_EXPR
9325 && code != NE_EXPR
9326 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9327 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9328 fold_overflow_warning (("assuming pointer wraparound does not "
9329 "occur when comparing P +- C1 with "
9330 "P +- C2"),
9331 WARN_STRICT_OVERFLOW_COMPARISON);
9333 return fold_build2_loc (loc, code, type, offset0, offset1);
9336 /* For non-equal bases we can simplify if they are addresses
9337 of local binding decls or constants. */
9338 else if (indirect_base0 && indirect_base1
9339 /* We know that !operand_equal_p (base0, base1, 0)
9340 because the if condition was false. But make
9341 sure two decls are not the same. */
9342 && base0 != base1
9343 && TREE_CODE (arg0) == ADDR_EXPR
9344 && TREE_CODE (arg1) == ADDR_EXPR
9345 && (((TREE_CODE (base0) == VAR_DECL
9346 || TREE_CODE (base0) == PARM_DECL)
9347 && (targetm.binds_local_p (base0)
9348 || CONSTANT_CLASS_P (base1)))
9349 || CONSTANT_CLASS_P (base0))
9350 && (((TREE_CODE (base1) == VAR_DECL
9351 || TREE_CODE (base1) == PARM_DECL)
9352 && (targetm.binds_local_p (base1)
9353 || CONSTANT_CLASS_P (base0)))
9354 || CONSTANT_CLASS_P (base1)))
9356 if (code == EQ_EXPR)
9357 return omit_two_operands_loc (loc, type, boolean_false_node,
9358 arg0, arg1);
9359 else if (code == NE_EXPR)
9360 return omit_two_operands_loc (loc, type, boolean_true_node,
9361 arg0, arg1);
9363 /* For equal offsets we can simplify to a comparison of the
9364 base addresses. */
9365 else if (bitpos0 == bitpos1
9366 && (indirect_base0
9367 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9368 && (indirect_base1
9369 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9370 && ((offset0 == offset1)
9371 || (offset0 && offset1
9372 && operand_equal_p (offset0, offset1, 0))))
9374 if (indirect_base0)
9375 base0 = build_fold_addr_expr_loc (loc, base0);
9376 if (indirect_base1)
9377 base1 = build_fold_addr_expr_loc (loc, base1);
9378 return fold_build2_loc (loc, code, type, base0, base1);
9382 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9383 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9384 the resulting offset is smaller in absolute value than the
9385 original one and has the same sign. */
9386 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9387 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9388 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9389 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9390 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9391 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9392 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9394 tree const1 = TREE_OPERAND (arg0, 1);
9395 tree const2 = TREE_OPERAND (arg1, 1);
9396 tree variable1 = TREE_OPERAND (arg0, 0);
9397 tree variable2 = TREE_OPERAND (arg1, 0);
9398 tree cst;
9399 const char * const warnmsg = G_("assuming signed overflow does not "
9400 "occur when combining constants around "
9401 "a comparison");
9403 /* Put the constant on the side where it doesn't overflow and is
9404 of lower absolute value and of same sign than before. */
9405 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9406 ? MINUS_EXPR : PLUS_EXPR,
9407 const2, const1);
9408 if (!TREE_OVERFLOW (cst)
9409 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9410 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9412 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9413 return fold_build2_loc (loc, code, type,
9414 variable1,
9415 fold_build2_loc (loc, TREE_CODE (arg1),
9416 TREE_TYPE (arg1),
9417 variable2, cst));
9420 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9421 ? MINUS_EXPR : PLUS_EXPR,
9422 const1, const2);
9423 if (!TREE_OVERFLOW (cst)
9424 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9425 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9427 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9428 return fold_build2_loc (loc, code, type,
9429 fold_build2_loc (loc, TREE_CODE (arg0),
9430 TREE_TYPE (arg0),
9431 variable1, cst),
9432 variable2);
9436 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9437 signed arithmetic case. That form is created by the compiler
9438 often enough for folding it to be of value. One example is in
9439 computing loop trip counts after Operator Strength Reduction. */
9440 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9441 && TREE_CODE (arg0) == MULT_EXPR
9442 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9443 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9444 && integer_zerop (arg1))
9446 tree const1 = TREE_OPERAND (arg0, 1);
9447 tree const2 = arg1; /* zero */
9448 tree variable1 = TREE_OPERAND (arg0, 0);
9449 enum tree_code cmp_code = code;
9451 /* Handle unfolded multiplication by zero. */
9452 if (integer_zerop (const1))
9453 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9455 fold_overflow_warning (("assuming signed overflow does not occur when "
9456 "eliminating multiplication in comparison "
9457 "with zero"),
9458 WARN_STRICT_OVERFLOW_COMPARISON);
9460 /* If const1 is negative we swap the sense of the comparison. */
9461 if (tree_int_cst_sgn (const1) < 0)
9462 cmp_code = swap_tree_comparison (cmp_code);
9464 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9467 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9468 if (tem)
9469 return tem;
9471 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9473 tree targ0 = strip_float_extensions (arg0);
9474 tree targ1 = strip_float_extensions (arg1);
9475 tree newtype = TREE_TYPE (targ0);
9477 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9478 newtype = TREE_TYPE (targ1);
9480 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9481 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9482 return fold_build2_loc (loc, code, type,
9483 fold_convert_loc (loc, newtype, targ0),
9484 fold_convert_loc (loc, newtype, targ1));
9486 /* (-a) CMP (-b) -> b CMP a */
9487 if (TREE_CODE (arg0) == NEGATE_EXPR
9488 && TREE_CODE (arg1) == NEGATE_EXPR)
9489 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9490 TREE_OPERAND (arg0, 0));
9492 if (TREE_CODE (arg1) == REAL_CST)
9494 REAL_VALUE_TYPE cst;
9495 cst = TREE_REAL_CST (arg1);
9497 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9498 if (TREE_CODE (arg0) == NEGATE_EXPR)
9499 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9500 TREE_OPERAND (arg0, 0),
9501 build_real (TREE_TYPE (arg1),
9502 real_value_negate (&cst)));
9504 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9505 /* a CMP (-0) -> a CMP 0 */
9506 if (REAL_VALUE_MINUS_ZERO (cst))
9507 return fold_build2_loc (loc, code, type, arg0,
9508 build_real (TREE_TYPE (arg1), dconst0));
9510 /* x != NaN is always true, other ops are always false. */
9511 if (REAL_VALUE_ISNAN (cst)
9512 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9514 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9515 return omit_one_operand_loc (loc, type, tem, arg0);
9518 /* Fold comparisons against infinity. */
9519 if (REAL_VALUE_ISINF (cst)
9520 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9522 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9523 if (tem != NULL_TREE)
9524 return tem;
9528 /* If this is a comparison of a real constant with a PLUS_EXPR
9529 or a MINUS_EXPR of a real constant, we can convert it into a
9530 comparison with a revised real constant as long as no overflow
9531 occurs when unsafe_math_optimizations are enabled. */
9532 if (flag_unsafe_math_optimizations
9533 && TREE_CODE (arg1) == REAL_CST
9534 && (TREE_CODE (arg0) == PLUS_EXPR
9535 || TREE_CODE (arg0) == MINUS_EXPR)
9536 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9537 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9538 ? MINUS_EXPR : PLUS_EXPR,
9539 arg1, TREE_OPERAND (arg0, 1)))
9540 && !TREE_OVERFLOW (tem))
9541 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9543 /* Likewise, we can simplify a comparison of a real constant with
9544 a MINUS_EXPR whose first operand is also a real constant, i.e.
9545 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9546 floating-point types only if -fassociative-math is set. */
9547 if (flag_associative_math
9548 && TREE_CODE (arg1) == REAL_CST
9549 && TREE_CODE (arg0) == MINUS_EXPR
9550 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9551 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9552 arg1))
9553 && !TREE_OVERFLOW (tem))
9554 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9555 TREE_OPERAND (arg0, 1), tem);
9557 /* Fold comparisons against built-in math functions. */
9558 if (TREE_CODE (arg1) == REAL_CST
9559 && flag_unsafe_math_optimizations
9560 && ! flag_errno_math)
9562 enum built_in_function fcode = builtin_mathfn_code (arg0);
9564 if (fcode != END_BUILTINS)
9566 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9567 if (tem != NULL_TREE)
9568 return tem;
9573 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9574 && CONVERT_EXPR_P (arg0))
9576 /* If we are widening one operand of an integer comparison,
9577 see if the other operand is similarly being widened. Perhaps we
9578 can do the comparison in the narrower type. */
9579 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9580 if (tem)
9581 return tem;
9583 /* Or if we are changing signedness. */
9584 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9585 if (tem)
9586 return tem;
9589 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9590 constant, we can simplify it. */
9591 if (TREE_CODE (arg1) == INTEGER_CST
9592 && (TREE_CODE (arg0) == MIN_EXPR
9593 || TREE_CODE (arg0) == MAX_EXPR)
9594 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9596 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9597 if (tem)
9598 return tem;
9601 /* Simplify comparison of something with itself. (For IEEE
9602 floating-point, we can only do some of these simplifications.) */
9603 if (operand_equal_p (arg0, arg1, 0))
9605 switch (code)
9607 case EQ_EXPR:
9608 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9609 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9610 return constant_boolean_node (1, type);
9611 break;
9613 case GE_EXPR:
9614 case LE_EXPR:
9615 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9616 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9617 return constant_boolean_node (1, type);
9618 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9620 case NE_EXPR:
9621 /* For NE, we can only do this simplification if integer
9622 or we don't honor IEEE floating point NaNs. */
9623 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9624 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9625 break;
9626 /* ... fall through ... */
9627 case GT_EXPR:
9628 case LT_EXPR:
9629 return constant_boolean_node (0, type);
9630 default:
9631 gcc_unreachable ();
9635 /* If we are comparing an expression that just has comparisons
9636 of two integer values, arithmetic expressions of those comparisons,
9637 and constants, we can simplify it. There are only three cases
9638 to check: the two values can either be equal, the first can be
9639 greater, or the second can be greater. Fold the expression for
9640 those three values. Since each value must be 0 or 1, we have
9641 eight possibilities, each of which corresponds to the constant 0
9642 or 1 or one of the six possible comparisons.
9644 This handles common cases like (a > b) == 0 but also handles
9645 expressions like ((x > y) - (y > x)) > 0, which supposedly
9646 occur in macroized code. */
9648 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9650 tree cval1 = 0, cval2 = 0;
9651 int save_p = 0;
9653 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9654 /* Don't handle degenerate cases here; they should already
9655 have been handled anyway. */
9656 && cval1 != 0 && cval2 != 0
9657 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9658 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9659 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9660 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9661 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9662 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9663 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9665 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9666 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9668 /* We can't just pass T to eval_subst in case cval1 or cval2
9669 was the same as ARG1. */
9671 tree high_result
9672 = fold_build2_loc (loc, code, type,
9673 eval_subst (loc, arg0, cval1, maxval,
9674 cval2, minval),
9675 arg1);
9676 tree equal_result
9677 = fold_build2_loc (loc, code, type,
9678 eval_subst (loc, arg0, cval1, maxval,
9679 cval2, maxval),
9680 arg1);
9681 tree low_result
9682 = fold_build2_loc (loc, code, type,
9683 eval_subst (loc, arg0, cval1, minval,
9684 cval2, maxval),
9685 arg1);
9687 /* All three of these results should be 0 or 1. Confirm they are.
9688 Then use those values to select the proper code to use. */
9690 if (TREE_CODE (high_result) == INTEGER_CST
9691 && TREE_CODE (equal_result) == INTEGER_CST
9692 && TREE_CODE (low_result) == INTEGER_CST)
9694 /* Make a 3-bit mask with the high-order bit being the
9695 value for `>', the next for '=', and the low for '<'. */
9696 switch ((integer_onep (high_result) * 4)
9697 + (integer_onep (equal_result) * 2)
9698 + integer_onep (low_result))
9700 case 0:
9701 /* Always false. */
9702 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9703 case 1:
9704 code = LT_EXPR;
9705 break;
9706 case 2:
9707 code = EQ_EXPR;
9708 break;
9709 case 3:
9710 code = LE_EXPR;
9711 break;
9712 case 4:
9713 code = GT_EXPR;
9714 break;
9715 case 5:
9716 code = NE_EXPR;
9717 break;
9718 case 6:
9719 code = GE_EXPR;
9720 break;
9721 case 7:
9722 /* Always true. */
9723 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9726 if (save_p)
9728 tem = save_expr (build2 (code, type, cval1, cval2));
9729 SET_EXPR_LOCATION (tem, loc);
9730 return tem;
9732 return fold_build2_loc (loc, code, type, cval1, cval2);
9737 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9738 into a single range test. */
9739 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9740 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9741 && TREE_CODE (arg1) == INTEGER_CST
9742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9743 && !integer_zerop (TREE_OPERAND (arg0, 1))
9744 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9745 && !TREE_OVERFLOW (arg1))
9747 tem = fold_div_compare (loc, code, type, arg0, arg1);
9748 if (tem != NULL_TREE)
9749 return tem;
9752 /* Fold ~X op ~Y as Y op X. */
9753 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9754 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9756 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9757 return fold_build2_loc (loc, code, type,
9758 fold_convert_loc (loc, cmp_type,
9759 TREE_OPERAND (arg1, 0)),
9760 TREE_OPERAND (arg0, 0));
9763 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9764 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9765 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9767 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9768 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9769 TREE_OPERAND (arg0, 0),
9770 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9771 fold_convert_loc (loc, cmp_type, arg1)));
9774 return NULL_TREE;
9778 /* Subroutine of fold_binary. Optimize complex multiplications of the
9779 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9780 argument EXPR represents the expression "z" of type TYPE. */
9782 static tree
9783 fold_mult_zconjz (location_t loc, tree type, tree expr)
9785 tree itype = TREE_TYPE (type);
9786 tree rpart, ipart, tem;
9788 if (TREE_CODE (expr) == COMPLEX_EXPR)
9790 rpart = TREE_OPERAND (expr, 0);
9791 ipart = TREE_OPERAND (expr, 1);
9793 else if (TREE_CODE (expr) == COMPLEX_CST)
9795 rpart = TREE_REALPART (expr);
9796 ipart = TREE_IMAGPART (expr);
9798 else
9800 expr = save_expr (expr);
9801 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9802 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9805 rpart = save_expr (rpart);
9806 ipart = save_expr (ipart);
9807 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9808 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9809 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9810 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9811 build_zero_cst (itype));
9815 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9816 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9817 guarantees that P and N have the same least significant log2(M) bits.
9818 N is not otherwise constrained. In particular, N is not normalized to
9819 0 <= N < M as is common. In general, the precise value of P is unknown.
9820 M is chosen as large as possible such that constant N can be determined.
9822 Returns M and sets *RESIDUE to N.
9824 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9825 account. This is not always possible due to PR 35705.
9828 static unsigned HOST_WIDE_INT
9829 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9830 bool allow_func_align)
9832 enum tree_code code;
9834 *residue = 0;
9836 code = TREE_CODE (expr);
9837 if (code == ADDR_EXPR)
9839 unsigned int bitalign;
9840 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9841 *residue /= BITS_PER_UNIT;
9842 return bitalign / BITS_PER_UNIT;
9844 else if (code == POINTER_PLUS_EXPR)
9846 tree op0, op1;
9847 unsigned HOST_WIDE_INT modulus;
9848 enum tree_code inner_code;
9850 op0 = TREE_OPERAND (expr, 0);
9851 STRIP_NOPS (op0);
9852 modulus = get_pointer_modulus_and_residue (op0, residue,
9853 allow_func_align);
9855 op1 = TREE_OPERAND (expr, 1);
9856 STRIP_NOPS (op1);
9857 inner_code = TREE_CODE (op1);
9858 if (inner_code == INTEGER_CST)
9860 *residue += TREE_INT_CST_LOW (op1);
9861 return modulus;
9863 else if (inner_code == MULT_EXPR)
9865 op1 = TREE_OPERAND (op1, 1);
9866 if (TREE_CODE (op1) == INTEGER_CST)
9868 unsigned HOST_WIDE_INT align;
9870 /* Compute the greatest power-of-2 divisor of op1. */
9871 align = TREE_INT_CST_LOW (op1);
9872 align &= -align;
9874 /* If align is non-zero and less than *modulus, replace
9875 *modulus with align., If align is 0, then either op1 is 0
9876 or the greatest power-of-2 divisor of op1 doesn't fit in an
9877 unsigned HOST_WIDE_INT. In either case, no additional
9878 constraint is imposed. */
9879 if (align)
9880 modulus = MIN (modulus, align);
9882 return modulus;
9887 /* If we get here, we were unable to determine anything useful about the
9888 expression. */
9889 return 1;
9892 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9893 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9895 static bool
9896 vec_cst_ctor_to_array (tree arg, tree *elts)
9898 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9900 if (TREE_CODE (arg) == VECTOR_CST)
9902 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9903 elts[i] = VECTOR_CST_ELT (arg, i);
9905 else if (TREE_CODE (arg) == CONSTRUCTOR)
9907 constructor_elt *elt;
9909 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9910 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9911 return false;
9912 else
9913 elts[i] = elt->value;
9915 else
9916 return false;
9917 for (; i < nelts; i++)
9918 elts[i]
9919 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9920 return true;
9923 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9924 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9925 NULL_TREE otherwise. */
9927 static tree
9928 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9930 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9931 tree *elts;
9932 bool need_ctor = false;
9934 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9935 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9936 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9937 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9938 return NULL_TREE;
9940 elts = XALLOCAVEC (tree, nelts * 3);
9941 if (!vec_cst_ctor_to_array (arg0, elts)
9942 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9943 return NULL_TREE;
9945 for (i = 0; i < nelts; i++)
9947 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9948 need_ctor = true;
9949 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9952 if (need_ctor)
9954 vec<constructor_elt, va_gc> *v;
9955 vec_alloc (v, nelts);
9956 for (i = 0; i < nelts; i++)
9957 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9958 return build_constructor (type, v);
9960 else
9961 return build_vector (type, &elts[2 * nelts]);
9964 /* Try to fold a pointer difference of type TYPE two address expressions of
9965 array references AREF0 and AREF1 using location LOC. Return a
9966 simplified expression for the difference or NULL_TREE. */
9968 static tree
9969 fold_addr_of_array_ref_difference (location_t loc, tree type,
9970 tree aref0, tree aref1)
9972 tree base0 = TREE_OPERAND (aref0, 0);
9973 tree base1 = TREE_OPERAND (aref1, 0);
9974 tree base_offset = build_int_cst (type, 0);
9976 /* If the bases are array references as well, recurse. If the bases
9977 are pointer indirections compute the difference of the pointers.
9978 If the bases are equal, we are set. */
9979 if ((TREE_CODE (base0) == ARRAY_REF
9980 && TREE_CODE (base1) == ARRAY_REF
9981 && (base_offset
9982 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9983 || (INDIRECT_REF_P (base0)
9984 && INDIRECT_REF_P (base1)
9985 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9986 TREE_OPERAND (base0, 0),
9987 TREE_OPERAND (base1, 0))))
9988 || operand_equal_p (base0, base1, 0))
9990 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9991 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9992 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9993 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9994 return fold_build2_loc (loc, PLUS_EXPR, type,
9995 base_offset,
9996 fold_build2_loc (loc, MULT_EXPR, type,
9997 diff, esz));
9999 return NULL_TREE;
10002 /* If the real or vector real constant CST of type TYPE has an exact
10003 inverse, return it, else return NULL. */
10005 static tree
10006 exact_inverse (tree type, tree cst)
10008 REAL_VALUE_TYPE r;
10009 tree unit_type, *elts;
10010 enum machine_mode mode;
10011 unsigned vec_nelts, i;
10013 switch (TREE_CODE (cst))
10015 case REAL_CST:
10016 r = TREE_REAL_CST (cst);
10018 if (exact_real_inverse (TYPE_MODE (type), &r))
10019 return build_real (type, r);
10021 return NULL_TREE;
10023 case VECTOR_CST:
10024 vec_nelts = VECTOR_CST_NELTS (cst);
10025 elts = XALLOCAVEC (tree, vec_nelts);
10026 unit_type = TREE_TYPE (type);
10027 mode = TYPE_MODE (unit_type);
10029 for (i = 0; i < vec_nelts; i++)
10031 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10032 if (!exact_real_inverse (mode, &r))
10033 return NULL_TREE;
10034 elts[i] = build_real (unit_type, r);
10037 return build_vector (type, elts);
10039 default:
10040 return NULL_TREE;
10044 /* Mask out the tz least significant bits of X of type TYPE where
10045 tz is the number of trailing zeroes in Y. */
10046 static double_int
10047 mask_with_tz (tree type, double_int x, double_int y)
10049 int tz = y.trailing_zeros ();
10051 if (tz > 0)
10053 double_int mask;
10055 mask = ~double_int::mask (tz);
10056 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
10057 return mask & x;
10059 return x;
10062 /* Return true when T is an address and is known to be nonzero.
10063 For floating point we further ensure that T is not denormal.
10064 Similar logic is present in nonzero_address in rtlanal.h.
10066 If the return value is based on the assumption that signed overflow
10067 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10068 change *STRICT_OVERFLOW_P. */
10070 static bool
10071 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10073 tree type = TREE_TYPE (t);
10074 enum tree_code code;
10076 /* Doing something useful for floating point would need more work. */
10077 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10078 return false;
10080 code = TREE_CODE (t);
10081 switch (TREE_CODE_CLASS (code))
10083 case tcc_unary:
10084 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10085 strict_overflow_p);
10086 case tcc_binary:
10087 case tcc_comparison:
10088 return tree_binary_nonzero_warnv_p (code, type,
10089 TREE_OPERAND (t, 0),
10090 TREE_OPERAND (t, 1),
10091 strict_overflow_p);
10092 case tcc_constant:
10093 case tcc_declaration:
10094 case tcc_reference:
10095 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10097 default:
10098 break;
10101 switch (code)
10103 case TRUTH_NOT_EXPR:
10104 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10105 strict_overflow_p);
10107 case TRUTH_AND_EXPR:
10108 case TRUTH_OR_EXPR:
10109 case TRUTH_XOR_EXPR:
10110 return tree_binary_nonzero_warnv_p (code, type,
10111 TREE_OPERAND (t, 0),
10112 TREE_OPERAND (t, 1),
10113 strict_overflow_p);
10115 case COND_EXPR:
10116 case CONSTRUCTOR:
10117 case OBJ_TYPE_REF:
10118 case ASSERT_EXPR:
10119 case ADDR_EXPR:
10120 case WITH_SIZE_EXPR:
10121 case SSA_NAME:
10122 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10124 case COMPOUND_EXPR:
10125 case MODIFY_EXPR:
10126 case BIND_EXPR:
10127 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10128 strict_overflow_p);
10130 case SAVE_EXPR:
10131 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10132 strict_overflow_p);
10134 case CALL_EXPR:
10136 tree fndecl = get_callee_fndecl (t);
10137 if (!fndecl) return false;
10138 if (flag_delete_null_pointer_checks && !flag_check_new
10139 && DECL_IS_OPERATOR_NEW (fndecl)
10140 && !TREE_NOTHROW (fndecl))
10141 return true;
10142 if (flag_delete_null_pointer_checks
10143 && lookup_attribute ("returns_nonnull",
10144 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10145 return true;
10146 return alloca_call_p (t);
10149 default:
10150 break;
10152 return false;
10155 /* Return true when T is an address and is known to be nonzero.
10156 Handle warnings about undefined signed overflow. */
10158 static bool
10159 tree_expr_nonzero_p (tree t)
10161 bool ret, strict_overflow_p;
10163 strict_overflow_p = false;
10164 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10165 if (strict_overflow_p)
10166 fold_overflow_warning (("assuming signed overflow does not occur when "
10167 "determining that expression is always "
10168 "non-zero"),
10169 WARN_STRICT_OVERFLOW_MISC);
10170 return ret;
10173 /* Fold a binary expression of code CODE and type TYPE with operands
10174 OP0 and OP1. LOC is the location of the resulting expression.
10175 Return the folded expression if folding is successful. Otherwise,
10176 return NULL_TREE. */
10178 static tree
10179 fold_binary_loc_1 (location_t loc,
10180 enum tree_code code, tree type, tree op0, tree op1)
10182 enum tree_code_class kind = TREE_CODE_CLASS (code);
10183 tree arg0, arg1, tem;
10184 tree t1 = NULL_TREE;
10185 bool strict_overflow_p;
10186 unsigned int prec;
10188 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10189 && TREE_CODE_LENGTH (code) == 2
10190 && op0 != NULL_TREE
10191 && op1 != NULL_TREE);
10193 arg0 = op0;
10194 arg1 = op1;
10196 /* Strip any conversions that don't change the mode. This is
10197 safe for every expression, except for a comparison expression
10198 because its signedness is derived from its operands. So, in
10199 the latter case, only strip conversions that don't change the
10200 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10201 preserved.
10203 Note that this is done as an internal manipulation within the
10204 constant folder, in order to find the simplest representation
10205 of the arguments so that their form can be studied. In any
10206 cases, the appropriate type conversions should be put back in
10207 the tree that will get out of the constant folder. */
10209 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10211 STRIP_SIGN_NOPS (arg0);
10212 STRIP_SIGN_NOPS (arg1);
10214 else
10216 STRIP_NOPS (arg0);
10217 STRIP_NOPS (arg1);
10220 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10221 constant but we can't do arithmetic on them. */
10222 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10223 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10224 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10225 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10226 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10227 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10228 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10230 if (kind == tcc_binary)
10232 /* Make sure type and arg0 have the same saturating flag. */
10233 gcc_assert (TYPE_SATURATING (type)
10234 == TYPE_SATURATING (TREE_TYPE (arg0)));
10235 tem = const_binop (code, arg0, arg1);
10237 else if (kind == tcc_comparison)
10238 tem = fold_relational_const (code, type, arg0, arg1);
10239 else
10240 tem = NULL_TREE;
10242 if (tem != NULL_TREE)
10244 if (TREE_TYPE (tem) != type)
10245 tem = fold_convert_loc (loc, type, tem);
10246 return tem;
10250 /* If this is a commutative operation, and ARG0 is a constant, move it
10251 to ARG1 to reduce the number of tests below. */
10252 if (commutative_tree_code (code)
10253 && tree_swap_operands_p (arg0, arg1, true))
10254 return fold_build2_loc (loc, code, type, op1, op0);
10256 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10258 First check for cases where an arithmetic operation is applied to a
10259 compound, conditional, or comparison operation. Push the arithmetic
10260 operation inside the compound or conditional to see if any folding
10261 can then be done. Convert comparison to conditional for this purpose.
10262 The also optimizes non-constant cases that used to be done in
10263 expand_expr.
10265 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10266 one of the operands is a comparison and the other is a comparison, a
10267 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10268 code below would make the expression more complex. Change it to a
10269 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10270 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10272 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10273 || code == EQ_EXPR || code == NE_EXPR)
10274 && TREE_CODE (type) != VECTOR_TYPE
10275 && ((truth_value_p (TREE_CODE (arg0))
10276 && (truth_value_p (TREE_CODE (arg1))
10277 || (TREE_CODE (arg1) == BIT_AND_EXPR
10278 && integer_onep (TREE_OPERAND (arg1, 1)))))
10279 || (truth_value_p (TREE_CODE (arg1))
10280 && (truth_value_p (TREE_CODE (arg0))
10281 || (TREE_CODE (arg0) == BIT_AND_EXPR
10282 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10284 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10285 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10286 : TRUTH_XOR_EXPR,
10287 boolean_type_node,
10288 fold_convert_loc (loc, boolean_type_node, arg0),
10289 fold_convert_loc (loc, boolean_type_node, arg1));
10291 if (code == EQ_EXPR)
10292 tem = invert_truthvalue_loc (loc, tem);
10294 return fold_convert_loc (loc, type, tem);
10297 if (TREE_CODE_CLASS (code) == tcc_binary
10298 || TREE_CODE_CLASS (code) == tcc_comparison)
10300 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10302 tem = fold_build2_loc (loc, code, type,
10303 fold_convert_loc (loc, TREE_TYPE (op0),
10304 TREE_OPERAND (arg0, 1)), op1);
10305 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10306 tem);
10308 if (TREE_CODE (arg1) == COMPOUND_EXPR
10309 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10311 tem = fold_build2_loc (loc, code, type, op0,
10312 fold_convert_loc (loc, TREE_TYPE (op1),
10313 TREE_OPERAND (arg1, 1)));
10314 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10315 tem);
10318 if (TREE_CODE (arg0) == COND_EXPR
10319 || TREE_CODE (arg0) == VEC_COND_EXPR
10320 || COMPARISON_CLASS_P (arg0))
10322 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10323 arg0, arg1,
10324 /*cond_first_p=*/1);
10325 if (tem != NULL_TREE)
10326 return tem;
10329 if (TREE_CODE (arg1) == COND_EXPR
10330 || TREE_CODE (arg1) == VEC_COND_EXPR
10331 || COMPARISON_CLASS_P (arg1))
10333 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10334 arg1, arg0,
10335 /*cond_first_p=*/0);
10336 if (tem != NULL_TREE)
10337 return tem;
10341 switch (code)
10343 case MEM_REF:
10344 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10345 if (TREE_CODE (arg0) == ADDR_EXPR
10346 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10348 tree iref = TREE_OPERAND (arg0, 0);
10349 return fold_build2 (MEM_REF, type,
10350 TREE_OPERAND (iref, 0),
10351 int_const_binop (PLUS_EXPR, arg1,
10352 TREE_OPERAND (iref, 1)));
10355 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10356 if (TREE_CODE (arg0) == ADDR_EXPR
10357 && handled_component_p (TREE_OPERAND (arg0, 0)))
10359 tree base;
10360 HOST_WIDE_INT coffset;
10361 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10362 &coffset);
10363 if (!base)
10364 return NULL_TREE;
10365 return fold_build2 (MEM_REF, type,
10366 build_fold_addr_expr (base),
10367 int_const_binop (PLUS_EXPR, arg1,
10368 size_int (coffset)));
10371 return NULL_TREE;
10373 case POINTER_PLUS_EXPR:
10374 /* 0 +p index -> (type)index */
10375 if (integer_zerop (arg0))
10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10378 /* PTR +p 0 -> PTR */
10379 if (integer_zerop (arg1))
10380 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10382 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10383 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10384 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10385 return fold_convert_loc (loc, type,
10386 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10387 fold_convert_loc (loc, sizetype,
10388 arg1),
10389 fold_convert_loc (loc, sizetype,
10390 arg0)));
10392 /* (PTR +p B) +p A -> PTR +p (B + A) */
10393 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10395 tree inner;
10396 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10397 tree arg00 = TREE_OPERAND (arg0, 0);
10398 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10399 arg01, fold_convert_loc (loc, sizetype, arg1));
10400 return fold_convert_loc (loc, type,
10401 fold_build_pointer_plus_loc (loc,
10402 arg00, inner));
10405 /* PTR_CST +p CST -> CST1 */
10406 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10407 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10408 fold_convert_loc (loc, type, arg1));
10410 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10411 of the array. Loop optimizer sometimes produce this type of
10412 expressions. */
10413 if (TREE_CODE (arg0) == ADDR_EXPR)
10415 tem = try_move_mult_to_index (loc, arg0,
10416 fold_convert_loc (loc,
10417 ssizetype, arg1));
10418 if (tem)
10419 return fold_convert_loc (loc, type, tem);
10422 return NULL_TREE;
10424 case PLUS_EXPR:
10425 /* A + (-B) -> A - B */
10426 if (TREE_CODE (arg1) == NEGATE_EXPR
10427 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10428 return fold_build2_loc (loc, MINUS_EXPR, type,
10429 fold_convert_loc (loc, type, arg0),
10430 fold_convert_loc (loc, type,
10431 TREE_OPERAND (arg1, 0)));
10432 /* (-A) + B -> B - A */
10433 if (TREE_CODE (arg0) == NEGATE_EXPR
10434 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10435 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10436 return fold_build2_loc (loc, MINUS_EXPR, type,
10437 fold_convert_loc (loc, type, arg1),
10438 fold_convert_loc (loc, type,
10439 TREE_OPERAND (arg0, 0)));
10441 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10443 /* Convert ~A + 1 to -A. */
10444 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10445 && integer_onep (arg1))
10446 return fold_build1_loc (loc, NEGATE_EXPR, type,
10447 fold_convert_loc (loc, type,
10448 TREE_OPERAND (arg0, 0)));
10450 /* ~X + X is -1. */
10451 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10452 && !TYPE_OVERFLOW_TRAPS (type))
10454 tree tem = TREE_OPERAND (arg0, 0);
10456 STRIP_NOPS (tem);
10457 if (operand_equal_p (tem, arg1, 0))
10459 t1 = build_all_ones_cst (type);
10460 return omit_one_operand_loc (loc, type, t1, arg1);
10464 /* X + ~X is -1. */
10465 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10466 && !TYPE_OVERFLOW_TRAPS (type))
10468 tree tem = TREE_OPERAND (arg1, 0);
10470 STRIP_NOPS (tem);
10471 if (operand_equal_p (arg0, tem, 0))
10473 t1 = build_all_ones_cst (type);
10474 return omit_one_operand_loc (loc, type, t1, arg0);
10478 /* X + (X / CST) * -CST is X % CST. */
10479 if (TREE_CODE (arg1) == MULT_EXPR
10480 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10481 && operand_equal_p (arg0,
10482 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10484 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10485 tree cst1 = TREE_OPERAND (arg1, 1);
10486 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10487 cst1, cst0);
10488 if (sum && integer_zerop (sum))
10489 return fold_convert_loc (loc, type,
10490 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10491 TREE_TYPE (arg0), arg0,
10492 cst0));
10496 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10497 one. Make sure the type is not saturating and has the signedness of
10498 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10499 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10500 if ((TREE_CODE (arg0) == MULT_EXPR
10501 || TREE_CODE (arg1) == MULT_EXPR)
10502 && !TYPE_SATURATING (type)
10503 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10504 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10505 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10507 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10508 if (tem)
10509 return tem;
10512 if (! FLOAT_TYPE_P (type))
10514 if (integer_zerop (arg1))
10515 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10517 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10518 with a constant, and the two constants have no bits in common,
10519 we should treat this as a BIT_IOR_EXPR since this may produce more
10520 simplifications. */
10521 if (TREE_CODE (arg0) == BIT_AND_EXPR
10522 && TREE_CODE (arg1) == BIT_AND_EXPR
10523 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10524 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10525 && integer_zerop (const_binop (BIT_AND_EXPR,
10526 TREE_OPERAND (arg0, 1),
10527 TREE_OPERAND (arg1, 1))))
10529 code = BIT_IOR_EXPR;
10530 goto bit_ior;
10533 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10534 (plus (plus (mult) (mult)) (foo)) so that we can
10535 take advantage of the factoring cases below. */
10536 if (TYPE_OVERFLOW_WRAPS (type)
10537 && (((TREE_CODE (arg0) == PLUS_EXPR
10538 || TREE_CODE (arg0) == MINUS_EXPR)
10539 && TREE_CODE (arg1) == MULT_EXPR)
10540 || ((TREE_CODE (arg1) == PLUS_EXPR
10541 || TREE_CODE (arg1) == MINUS_EXPR)
10542 && TREE_CODE (arg0) == MULT_EXPR)))
10544 tree parg0, parg1, parg, marg;
10545 enum tree_code pcode;
10547 if (TREE_CODE (arg1) == MULT_EXPR)
10548 parg = arg0, marg = arg1;
10549 else
10550 parg = arg1, marg = arg0;
10551 pcode = TREE_CODE (parg);
10552 parg0 = TREE_OPERAND (parg, 0);
10553 parg1 = TREE_OPERAND (parg, 1);
10554 STRIP_NOPS (parg0);
10555 STRIP_NOPS (parg1);
10557 if (TREE_CODE (parg0) == MULT_EXPR
10558 && TREE_CODE (parg1) != MULT_EXPR)
10559 return fold_build2_loc (loc, pcode, type,
10560 fold_build2_loc (loc, PLUS_EXPR, type,
10561 fold_convert_loc (loc, type,
10562 parg0),
10563 fold_convert_loc (loc, type,
10564 marg)),
10565 fold_convert_loc (loc, type, parg1));
10566 if (TREE_CODE (parg0) != MULT_EXPR
10567 && TREE_CODE (parg1) == MULT_EXPR)
10568 return
10569 fold_build2_loc (loc, PLUS_EXPR, type,
10570 fold_convert_loc (loc, type, parg0),
10571 fold_build2_loc (loc, pcode, type,
10572 fold_convert_loc (loc, type, marg),
10573 fold_convert_loc (loc, type,
10574 parg1)));
10577 else
10579 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10580 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10581 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10583 /* Likewise if the operands are reversed. */
10584 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10587 /* Convert X + -C into X - C. */
10588 if (TREE_CODE (arg1) == REAL_CST
10589 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10591 tem = fold_negate_const (arg1, type);
10592 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10593 return fold_build2_loc (loc, MINUS_EXPR, type,
10594 fold_convert_loc (loc, type, arg0),
10595 fold_convert_loc (loc, type, tem));
10598 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10599 to __complex__ ( x, y ). This is not the same for SNaNs or
10600 if signed zeros are involved. */
10601 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10602 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10603 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10605 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10606 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10607 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10608 bool arg0rz = false, arg0iz = false;
10609 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10610 || (arg0i && (arg0iz = real_zerop (arg0i))))
10612 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10613 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10614 if (arg0rz && arg1i && real_zerop (arg1i))
10616 tree rp = arg1r ? arg1r
10617 : build1 (REALPART_EXPR, rtype, arg1);
10618 tree ip = arg0i ? arg0i
10619 : build1 (IMAGPART_EXPR, rtype, arg0);
10620 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10622 else if (arg0iz && arg1r && real_zerop (arg1r))
10624 tree rp = arg0r ? arg0r
10625 : build1 (REALPART_EXPR, rtype, arg0);
10626 tree ip = arg1i ? arg1i
10627 : build1 (IMAGPART_EXPR, rtype, arg1);
10628 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10633 if (flag_unsafe_math_optimizations
10634 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10635 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10636 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10637 return tem;
10639 /* Convert x+x into x*2.0. */
10640 if (operand_equal_p (arg0, arg1, 0)
10641 && SCALAR_FLOAT_TYPE_P (type))
10642 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10643 build_real (type, dconst2));
10645 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10646 We associate floats only if the user has specified
10647 -fassociative-math. */
10648 if (flag_associative_math
10649 && TREE_CODE (arg1) == PLUS_EXPR
10650 && TREE_CODE (arg0) != MULT_EXPR)
10652 tree tree10 = TREE_OPERAND (arg1, 0);
10653 tree tree11 = TREE_OPERAND (arg1, 1);
10654 if (TREE_CODE (tree11) == MULT_EXPR
10655 && TREE_CODE (tree10) == MULT_EXPR)
10657 tree tree0;
10658 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10659 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10662 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10663 We associate floats only if the user has specified
10664 -fassociative-math. */
10665 if (flag_associative_math
10666 && TREE_CODE (arg0) == PLUS_EXPR
10667 && TREE_CODE (arg1) != MULT_EXPR)
10669 tree tree00 = TREE_OPERAND (arg0, 0);
10670 tree tree01 = TREE_OPERAND (arg0, 1);
10671 if (TREE_CODE (tree01) == MULT_EXPR
10672 && TREE_CODE (tree00) == MULT_EXPR)
10674 tree tree0;
10675 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10676 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10681 bit_rotate:
10682 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10683 is a rotate of A by C1 bits. */
10684 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10685 is a rotate of A by B bits. */
10687 enum tree_code code0, code1;
10688 tree rtype;
10689 code0 = TREE_CODE (arg0);
10690 code1 = TREE_CODE (arg1);
10691 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10692 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10693 && operand_equal_p (TREE_OPERAND (arg0, 0),
10694 TREE_OPERAND (arg1, 0), 0)
10695 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10696 TYPE_UNSIGNED (rtype))
10697 /* Only create rotates in complete modes. Other cases are not
10698 expanded properly. */
10699 && (element_precision (rtype)
10700 == element_precision (TYPE_MODE (rtype))))
10702 tree tree01, tree11;
10703 enum tree_code code01, code11;
10705 tree01 = TREE_OPERAND (arg0, 1);
10706 tree11 = TREE_OPERAND (arg1, 1);
10707 STRIP_NOPS (tree01);
10708 STRIP_NOPS (tree11);
10709 code01 = TREE_CODE (tree01);
10710 code11 = TREE_CODE (tree11);
10711 if (code01 == INTEGER_CST
10712 && code11 == INTEGER_CST
10713 && TREE_INT_CST_HIGH (tree01) == 0
10714 && TREE_INT_CST_HIGH (tree11) == 0
10715 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10716 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10718 tem = build2_loc (loc, LROTATE_EXPR,
10719 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10720 TREE_OPERAND (arg0, 0),
10721 code0 == LSHIFT_EXPR ? tree01 : tree11);
10722 return fold_convert_loc (loc, type, tem);
10724 else if (code11 == MINUS_EXPR)
10726 tree tree110, tree111;
10727 tree110 = TREE_OPERAND (tree11, 0);
10728 tree111 = TREE_OPERAND (tree11, 1);
10729 STRIP_NOPS (tree110);
10730 STRIP_NOPS (tree111);
10731 if (TREE_CODE (tree110) == INTEGER_CST
10732 && 0 == compare_tree_int (tree110,
10733 element_precision
10734 (TREE_TYPE (TREE_OPERAND
10735 (arg0, 0))))
10736 && operand_equal_p (tree01, tree111, 0))
10737 return
10738 fold_convert_loc (loc, type,
10739 build2 ((code0 == LSHIFT_EXPR
10740 ? LROTATE_EXPR
10741 : RROTATE_EXPR),
10742 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10743 TREE_OPERAND (arg0, 0), tree01));
10745 else if (code01 == MINUS_EXPR)
10747 tree tree010, tree011;
10748 tree010 = TREE_OPERAND (tree01, 0);
10749 tree011 = TREE_OPERAND (tree01, 1);
10750 STRIP_NOPS (tree010);
10751 STRIP_NOPS (tree011);
10752 if (TREE_CODE (tree010) == INTEGER_CST
10753 && 0 == compare_tree_int (tree010,
10754 element_precision
10755 (TREE_TYPE (TREE_OPERAND
10756 (arg0, 0))))
10757 && operand_equal_p (tree11, tree011, 0))
10758 return fold_convert_loc
10759 (loc, type,
10760 build2 ((code0 != LSHIFT_EXPR
10761 ? LROTATE_EXPR
10762 : RROTATE_EXPR),
10763 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10764 TREE_OPERAND (arg0, 0), tree11));
10769 associate:
10770 /* In most languages, can't associate operations on floats through
10771 parentheses. Rather than remember where the parentheses were, we
10772 don't associate floats at all, unless the user has specified
10773 -fassociative-math.
10774 And, we need to make sure type is not saturating. */
10776 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10777 && !TYPE_SATURATING (type))
10779 tree var0, con0, lit0, minus_lit0;
10780 tree var1, con1, lit1, minus_lit1;
10781 tree atype = type;
10782 bool ok = true;
10784 /* Split both trees into variables, constants, and literals. Then
10785 associate each group together, the constants with literals,
10786 then the result with variables. This increases the chances of
10787 literals being recombined later and of generating relocatable
10788 expressions for the sum of a constant and literal. */
10789 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10790 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10791 code == MINUS_EXPR);
10793 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10794 if (code == MINUS_EXPR)
10795 code = PLUS_EXPR;
10797 /* With undefined overflow prefer doing association in a type
10798 which wraps on overflow, if that is one of the operand types. */
10799 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10800 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10802 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10803 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10804 atype = TREE_TYPE (arg0);
10805 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10806 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10807 atype = TREE_TYPE (arg1);
10808 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10811 /* With undefined overflow we can only associate constants with one
10812 variable, and constants whose association doesn't overflow. */
10813 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10814 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10816 if (var0 && var1)
10818 tree tmp0 = var0;
10819 tree tmp1 = var1;
10821 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10822 tmp0 = TREE_OPERAND (tmp0, 0);
10823 if (CONVERT_EXPR_P (tmp0)
10824 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10825 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10826 <= TYPE_PRECISION (atype)))
10827 tmp0 = TREE_OPERAND (tmp0, 0);
10828 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10829 tmp1 = TREE_OPERAND (tmp1, 0);
10830 if (CONVERT_EXPR_P (tmp1)
10831 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10832 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10833 <= TYPE_PRECISION (atype)))
10834 tmp1 = TREE_OPERAND (tmp1, 0);
10835 /* The only case we can still associate with two variables
10836 is if they are the same, modulo negation and bit-pattern
10837 preserving conversions. */
10838 if (!operand_equal_p (tmp0, tmp1, 0))
10839 ok = false;
10843 /* Only do something if we found more than two objects. Otherwise,
10844 nothing has changed and we risk infinite recursion. */
10845 if (ok
10846 && (2 < ((var0 != 0) + (var1 != 0)
10847 + (con0 != 0) + (con1 != 0)
10848 + (lit0 != 0) + (lit1 != 0)
10849 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10851 bool any_overflows = false;
10852 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10853 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10854 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10855 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10856 var0 = associate_trees (loc, var0, var1, code, atype);
10857 con0 = associate_trees (loc, con0, con1, code, atype);
10858 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10859 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10860 code, atype);
10862 /* Preserve the MINUS_EXPR if the negative part of the literal is
10863 greater than the positive part. Otherwise, the multiplicative
10864 folding code (i.e extract_muldiv) may be fooled in case
10865 unsigned constants are subtracted, like in the following
10866 example: ((X*2 + 4) - 8U)/2. */
10867 if (minus_lit0 && lit0)
10869 if (TREE_CODE (lit0) == INTEGER_CST
10870 && TREE_CODE (minus_lit0) == INTEGER_CST
10871 && tree_int_cst_lt (lit0, minus_lit0))
10873 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10874 MINUS_EXPR, atype);
10875 lit0 = 0;
10877 else
10879 lit0 = associate_trees (loc, lit0, minus_lit0,
10880 MINUS_EXPR, atype);
10881 minus_lit0 = 0;
10885 /* Don't introduce overflows through reassociation. */
10886 if (!any_overflows
10887 && ((lit0 && TREE_OVERFLOW (lit0))
10888 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10889 return NULL_TREE;
10891 if (minus_lit0)
10893 if (con0 == 0)
10894 return
10895 fold_convert_loc (loc, type,
10896 associate_trees (loc, var0, minus_lit0,
10897 MINUS_EXPR, atype));
10898 else
10900 con0 = associate_trees (loc, con0, minus_lit0,
10901 MINUS_EXPR, atype);
10902 return
10903 fold_convert_loc (loc, type,
10904 associate_trees (loc, var0, con0,
10905 PLUS_EXPR, atype));
10909 con0 = associate_trees (loc, con0, lit0, code, atype);
10910 return
10911 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10912 code, atype));
10916 return NULL_TREE;
10918 case MINUS_EXPR:
10919 /* Pointer simplifications for subtraction, simple reassociations. */
10920 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10922 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10923 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10924 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10926 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10927 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10928 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10929 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10930 return fold_build2_loc (loc, PLUS_EXPR, type,
10931 fold_build2_loc (loc, MINUS_EXPR, type,
10932 arg00, arg10),
10933 fold_build2_loc (loc, MINUS_EXPR, type,
10934 arg01, arg11));
10936 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10937 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10939 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10940 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10941 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10942 fold_convert_loc (loc, type, arg1));
10943 if (tmp)
10944 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10947 /* A - (-B) -> A + B */
10948 if (TREE_CODE (arg1) == NEGATE_EXPR)
10949 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10950 fold_convert_loc (loc, type,
10951 TREE_OPERAND (arg1, 0)));
10952 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10953 if (TREE_CODE (arg0) == NEGATE_EXPR
10954 && negate_expr_p (arg1)
10955 && reorder_operands_p (arg0, arg1))
10956 return fold_build2_loc (loc, MINUS_EXPR, type,
10957 fold_convert_loc (loc, type,
10958 negate_expr (arg1)),
10959 fold_convert_loc (loc, type,
10960 TREE_OPERAND (arg0, 0)));
10961 /* Convert -A - 1 to ~A. */
10962 if (TREE_CODE (type) != COMPLEX_TYPE
10963 && TREE_CODE (arg0) == NEGATE_EXPR
10964 && integer_onep (arg1)
10965 && !TYPE_OVERFLOW_TRAPS (type))
10966 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10967 fold_convert_loc (loc, type,
10968 TREE_OPERAND (arg0, 0)));
10970 /* Convert -1 - A to ~A. */
10971 if (TREE_CODE (type) != COMPLEX_TYPE
10972 && integer_all_onesp (arg0))
10973 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10976 /* X - (X / Y) * Y is X % Y. */
10977 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10978 && TREE_CODE (arg1) == MULT_EXPR
10979 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10980 && operand_equal_p (arg0,
10981 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10982 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10983 TREE_OPERAND (arg1, 1), 0))
10984 return
10985 fold_convert_loc (loc, type,
10986 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10987 arg0, TREE_OPERAND (arg1, 1)));
10989 if (! FLOAT_TYPE_P (type))
10991 if (integer_zerop (arg0))
10992 return negate_expr (fold_convert_loc (loc, type, arg1));
10993 if (integer_zerop (arg1))
10994 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10996 /* Fold A - (A & B) into ~B & A. */
10997 if (!TREE_SIDE_EFFECTS (arg0)
10998 && TREE_CODE (arg1) == BIT_AND_EXPR)
11000 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
11002 tree arg10 = fold_convert_loc (loc, type,
11003 TREE_OPERAND (arg1, 0));
11004 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11005 fold_build1_loc (loc, BIT_NOT_EXPR,
11006 type, arg10),
11007 fold_convert_loc (loc, type, arg0));
11009 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11011 tree arg11 = fold_convert_loc (loc,
11012 type, TREE_OPERAND (arg1, 1));
11013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11014 fold_build1_loc (loc, BIT_NOT_EXPR,
11015 type, arg11),
11016 fold_convert_loc (loc, type, arg0));
11020 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
11021 any power of 2 minus 1. */
11022 if (TREE_CODE (arg0) == BIT_AND_EXPR
11023 && TREE_CODE (arg1) == BIT_AND_EXPR
11024 && operand_equal_p (TREE_OPERAND (arg0, 0),
11025 TREE_OPERAND (arg1, 0), 0))
11027 tree mask0 = TREE_OPERAND (arg0, 1);
11028 tree mask1 = TREE_OPERAND (arg1, 1);
11029 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
11031 if (operand_equal_p (tem, mask1, 0))
11033 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
11034 TREE_OPERAND (arg0, 0), mask1);
11035 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
11040 /* See if ARG1 is zero and X - ARG1 reduces to X. */
11041 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
11042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11044 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
11045 ARG0 is zero and X + ARG0 reduces to X, since that would mean
11046 (-ARG1 + ARG0) reduces to -ARG1. */
11047 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
11048 return negate_expr (fold_convert_loc (loc, type, arg1));
11050 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11051 __complex__ ( x, -y ). This is not the same for SNaNs or if
11052 signed zeros are involved. */
11053 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11054 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11055 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11057 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11058 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11059 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11060 bool arg0rz = false, arg0iz = false;
11061 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11062 || (arg0i && (arg0iz = real_zerop (arg0i))))
11064 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11065 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11066 if (arg0rz && arg1i && real_zerop (arg1i))
11068 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11069 arg1r ? arg1r
11070 : build1 (REALPART_EXPR, rtype, arg1));
11071 tree ip = arg0i ? arg0i
11072 : build1 (IMAGPART_EXPR, rtype, arg0);
11073 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11075 else if (arg0iz && arg1r && real_zerop (arg1r))
11077 tree rp = arg0r ? arg0r
11078 : build1 (REALPART_EXPR, rtype, arg0);
11079 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11080 arg1i ? arg1i
11081 : build1 (IMAGPART_EXPR, rtype, arg1));
11082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11087 /* Fold &x - &x. This can happen from &x.foo - &x.
11088 This is unsafe for certain floats even in non-IEEE formats.
11089 In IEEE, it is unsafe because it does wrong for NaNs.
11090 Also note that operand_equal_p is always false if an operand
11091 is volatile. */
11093 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11094 && operand_equal_p (arg0, arg1, 0))
11095 return build_zero_cst (type);
11097 /* A - B -> A + (-B) if B is easily negatable. */
11098 if (negate_expr_p (arg1)
11099 && ((FLOAT_TYPE_P (type)
11100 /* Avoid this transformation if B is a positive REAL_CST. */
11101 && (TREE_CODE (arg1) != REAL_CST
11102 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11103 || INTEGRAL_TYPE_P (type)))
11104 return fold_build2_loc (loc, PLUS_EXPR, type,
11105 fold_convert_loc (loc, type, arg0),
11106 fold_convert_loc (loc, type,
11107 negate_expr (arg1)));
11109 /* Try folding difference of addresses. */
11111 HOST_WIDE_INT diff;
11113 if ((TREE_CODE (arg0) == ADDR_EXPR
11114 || TREE_CODE (arg1) == ADDR_EXPR)
11115 && ptr_difference_const (arg0, arg1, &diff))
11116 return build_int_cst_type (type, diff);
11119 /* Fold &a[i] - &a[j] to i-j. */
11120 if (TREE_CODE (arg0) == ADDR_EXPR
11121 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11122 && TREE_CODE (arg1) == ADDR_EXPR
11123 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11125 tree tem = fold_addr_of_array_ref_difference (loc, type,
11126 TREE_OPERAND (arg0, 0),
11127 TREE_OPERAND (arg1, 0));
11128 if (tem)
11129 return tem;
11132 if (FLOAT_TYPE_P (type)
11133 && flag_unsafe_math_optimizations
11134 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11135 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11136 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11137 return tem;
11139 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11140 one. Make sure the type is not saturating and has the signedness of
11141 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11142 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11143 if ((TREE_CODE (arg0) == MULT_EXPR
11144 || TREE_CODE (arg1) == MULT_EXPR)
11145 && !TYPE_SATURATING (type)
11146 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11147 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11148 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11150 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11151 if (tem)
11152 return tem;
11155 goto associate;
11157 case MULT_EXPR:
11158 /* (-A) * (-B) -> A * B */
11159 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11160 return fold_build2_loc (loc, MULT_EXPR, type,
11161 fold_convert_loc (loc, type,
11162 TREE_OPERAND (arg0, 0)),
11163 fold_convert_loc (loc, type,
11164 negate_expr (arg1)));
11165 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11166 return fold_build2_loc (loc, MULT_EXPR, type,
11167 fold_convert_loc (loc, type,
11168 negate_expr (arg0)),
11169 fold_convert_loc (loc, type,
11170 TREE_OPERAND (arg1, 0)));
11172 if (! FLOAT_TYPE_P (type))
11174 if (integer_zerop (arg1))
11175 return omit_one_operand_loc (loc, type, arg1, arg0);
11176 if (integer_onep (arg1))
11177 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11178 /* Transform x * -1 into -x. Make sure to do the negation
11179 on the original operand with conversions not stripped
11180 because we can only strip non-sign-changing conversions. */
11181 if (integer_minus_onep (arg1))
11182 return fold_convert_loc (loc, type, negate_expr (op0));
11183 /* Transform x * -C into -x * C if x is easily negatable. */
11184 if (TREE_CODE (arg1) == INTEGER_CST
11185 && tree_int_cst_sgn (arg1) == -1
11186 && negate_expr_p (arg0)
11187 && (tem = negate_expr (arg1)) != arg1
11188 && !TREE_OVERFLOW (tem))
11189 return fold_build2_loc (loc, MULT_EXPR, type,
11190 fold_convert_loc (loc, type,
11191 negate_expr (arg0)),
11192 tem);
11194 /* (a * (1 << b)) is (a << b) */
11195 if (TREE_CODE (arg1) == LSHIFT_EXPR
11196 && integer_onep (TREE_OPERAND (arg1, 0)))
11197 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11198 TREE_OPERAND (arg1, 1));
11199 if (TREE_CODE (arg0) == LSHIFT_EXPR
11200 && integer_onep (TREE_OPERAND (arg0, 0)))
11201 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11202 TREE_OPERAND (arg0, 1));
11204 /* (A + A) * C -> A * 2 * C */
11205 if (TREE_CODE (arg0) == PLUS_EXPR
11206 && TREE_CODE (arg1) == INTEGER_CST
11207 && operand_equal_p (TREE_OPERAND (arg0, 0),
11208 TREE_OPERAND (arg0, 1), 0))
11209 return fold_build2_loc (loc, MULT_EXPR, type,
11210 omit_one_operand_loc (loc, type,
11211 TREE_OPERAND (arg0, 0),
11212 TREE_OPERAND (arg0, 1)),
11213 fold_build2_loc (loc, MULT_EXPR, type,
11214 build_int_cst (type, 2) , arg1));
11216 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11217 sign-changing only. */
11218 if (TREE_CODE (arg1) == INTEGER_CST
11219 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11220 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11221 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11223 strict_overflow_p = false;
11224 if (TREE_CODE (arg1) == INTEGER_CST
11225 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11226 &strict_overflow_p)))
11228 if (strict_overflow_p)
11229 fold_overflow_warning (("assuming signed overflow does not "
11230 "occur when simplifying "
11231 "multiplication"),
11232 WARN_STRICT_OVERFLOW_MISC);
11233 return fold_convert_loc (loc, type, tem);
11236 /* Optimize z * conj(z) for integer complex numbers. */
11237 if (TREE_CODE (arg0) == CONJ_EXPR
11238 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11239 return fold_mult_zconjz (loc, type, arg1);
11240 if (TREE_CODE (arg1) == CONJ_EXPR
11241 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11242 return fold_mult_zconjz (loc, type, arg0);
11244 else
11246 /* Maybe fold x * 0 to 0. The expressions aren't the same
11247 when x is NaN, since x * 0 is also NaN. Nor are they the
11248 same in modes with signed zeros, since multiplying a
11249 negative value by 0 gives -0, not +0. */
11250 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11251 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11252 && real_zerop (arg1))
11253 return omit_one_operand_loc (loc, type, arg1, arg0);
11254 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11255 Likewise for complex arithmetic with signed zeros. */
11256 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11257 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11258 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11259 && real_onep (arg1))
11260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11262 /* Transform x * -1.0 into -x. */
11263 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11264 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11265 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11266 && real_minus_onep (arg1))
11267 return fold_convert_loc (loc, type, negate_expr (arg0));
11269 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11270 the result for floating point types due to rounding so it is applied
11271 only if -fassociative-math was specify. */
11272 if (flag_associative_math
11273 && TREE_CODE (arg0) == RDIV_EXPR
11274 && TREE_CODE (arg1) == REAL_CST
11275 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11277 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11278 arg1);
11279 if (tem)
11280 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11281 TREE_OPERAND (arg0, 1));
11284 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11285 if (operand_equal_p (arg0, arg1, 0))
11287 tree tem = fold_strip_sign_ops (arg0);
11288 if (tem != NULL_TREE)
11290 tem = fold_convert_loc (loc, type, tem);
11291 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11295 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11296 This is not the same for NaNs or if signed zeros are
11297 involved. */
11298 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11299 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11300 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11301 && TREE_CODE (arg1) == COMPLEX_CST
11302 && real_zerop (TREE_REALPART (arg1)))
11304 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11305 if (real_onep (TREE_IMAGPART (arg1)))
11306 return
11307 fold_build2_loc (loc, COMPLEX_EXPR, type,
11308 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11309 rtype, arg0)),
11310 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11311 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11312 return
11313 fold_build2_loc (loc, COMPLEX_EXPR, type,
11314 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11315 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11316 rtype, arg0)));
11319 /* Optimize z * conj(z) for floating point complex numbers.
11320 Guarded by flag_unsafe_math_optimizations as non-finite
11321 imaginary components don't produce scalar results. */
11322 if (flag_unsafe_math_optimizations
11323 && TREE_CODE (arg0) == CONJ_EXPR
11324 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11325 return fold_mult_zconjz (loc, type, arg1);
11326 if (flag_unsafe_math_optimizations
11327 && TREE_CODE (arg1) == CONJ_EXPR
11328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11329 return fold_mult_zconjz (loc, type, arg0);
11331 if (flag_unsafe_math_optimizations)
11333 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11334 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11336 /* Optimizations of root(...)*root(...). */
11337 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11339 tree rootfn, arg;
11340 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11341 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11343 /* Optimize sqrt(x)*sqrt(x) as x. */
11344 if (BUILTIN_SQRT_P (fcode0)
11345 && operand_equal_p (arg00, arg10, 0)
11346 && ! HONOR_SNANS (TYPE_MODE (type)))
11347 return arg00;
11349 /* Optimize root(x)*root(y) as root(x*y). */
11350 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11351 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11352 return build_call_expr_loc (loc, rootfn, 1, arg);
11355 /* Optimize expN(x)*expN(y) as expN(x+y). */
11356 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11358 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11359 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11360 CALL_EXPR_ARG (arg0, 0),
11361 CALL_EXPR_ARG (arg1, 0));
11362 return build_call_expr_loc (loc, expfn, 1, arg);
11365 /* Optimizations of pow(...)*pow(...). */
11366 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11367 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11368 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11370 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11371 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11372 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11373 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11375 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11376 if (operand_equal_p (arg01, arg11, 0))
11378 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11379 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11380 arg00, arg10);
11381 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11384 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11385 if (operand_equal_p (arg00, arg10, 0))
11387 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11388 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11389 arg01, arg11);
11390 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11394 /* Optimize tan(x)*cos(x) as sin(x). */
11395 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11396 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11397 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11398 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11399 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11400 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11401 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11402 CALL_EXPR_ARG (arg1, 0), 0))
11404 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11406 if (sinfn != NULL_TREE)
11407 return build_call_expr_loc (loc, sinfn, 1,
11408 CALL_EXPR_ARG (arg0, 0));
11411 /* Optimize x*pow(x,c) as pow(x,c+1). */
11412 if (fcode1 == BUILT_IN_POW
11413 || fcode1 == BUILT_IN_POWF
11414 || fcode1 == BUILT_IN_POWL)
11416 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11417 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11418 if (TREE_CODE (arg11) == REAL_CST
11419 && !TREE_OVERFLOW (arg11)
11420 && operand_equal_p (arg0, arg10, 0))
11422 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11423 REAL_VALUE_TYPE c;
11424 tree arg;
11426 c = TREE_REAL_CST (arg11);
11427 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11428 arg = build_real (type, c);
11429 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11433 /* Optimize pow(x,c)*x as pow(x,c+1). */
11434 if (fcode0 == BUILT_IN_POW
11435 || fcode0 == BUILT_IN_POWF
11436 || fcode0 == BUILT_IN_POWL)
11438 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11439 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11440 if (TREE_CODE (arg01) == REAL_CST
11441 && !TREE_OVERFLOW (arg01)
11442 && operand_equal_p (arg1, arg00, 0))
11444 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11445 REAL_VALUE_TYPE c;
11446 tree arg;
11448 c = TREE_REAL_CST (arg01);
11449 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11450 arg = build_real (type, c);
11451 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11455 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11456 if (!in_gimple_form
11457 && optimize
11458 && operand_equal_p (arg0, arg1, 0))
11460 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11462 if (powfn)
11464 tree arg = build_real (type, dconst2);
11465 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11470 goto associate;
11472 case BIT_IOR_EXPR:
11473 bit_ior:
11474 if (integer_all_onesp (arg1))
11475 return omit_one_operand_loc (loc, type, arg1, arg0);
11476 if (integer_zerop (arg1))
11477 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11478 if (operand_equal_p (arg0, arg1, 0))
11479 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11481 /* ~X | X is -1. */
11482 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11483 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11485 t1 = build_zero_cst (type);
11486 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11487 return omit_one_operand_loc (loc, type, t1, arg1);
11490 /* X | ~X is -1. */
11491 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11494 t1 = build_zero_cst (type);
11495 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11496 return omit_one_operand_loc (loc, type, t1, arg0);
11499 /* Canonicalize (X & C1) | C2. */
11500 if (TREE_CODE (arg0) == BIT_AND_EXPR
11501 && TREE_CODE (arg1) == INTEGER_CST
11502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11504 double_int c1, c2, c3, msk;
11505 int width = TYPE_PRECISION (type), w;
11507 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11508 c2 = tree_to_double_int (arg1);
11510 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11511 if ((c1 & c2) == c1)
11512 return omit_one_operand_loc (loc, type, arg1,
11513 TREE_OPERAND (arg0, 0));
11515 msk = double_int::mask (width);
11517 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11518 if (msk.and_not (c1 | c2).is_zero ())
11519 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11520 TREE_OPERAND (arg0, 0), arg1);
11522 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11523 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11524 mode which allows further optimizations. */
11525 c1 &= msk;
11526 c2 &= msk;
11527 c3 = c1.and_not (c2);
11528 for (w = BITS_PER_UNIT;
11529 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11530 w <<= 1)
11532 unsigned HOST_WIDE_INT mask
11533 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11534 if (((c1.low | c2.low) & mask) == mask
11535 && (c1.low & ~mask) == 0 && c1.high == 0)
11537 c3 = double_int::from_uhwi (mask);
11538 break;
11542 if (c3 != c1)
11543 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11544 fold_build2_loc (loc, BIT_AND_EXPR, type,
11545 TREE_OPERAND (arg0, 0),
11546 double_int_to_tree (type,
11547 c3)),
11548 arg1);
11551 /* (X & Y) | Y is (X, Y). */
11552 if (TREE_CODE (arg0) == BIT_AND_EXPR
11553 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11554 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11555 /* (X & Y) | X is (Y, X). */
11556 if (TREE_CODE (arg0) == BIT_AND_EXPR
11557 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11558 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11559 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11560 /* X | (X & Y) is (Y, X). */
11561 if (TREE_CODE (arg1) == BIT_AND_EXPR
11562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11563 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11564 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11565 /* X | (Y & X) is (Y, X). */
11566 if (TREE_CODE (arg1) == BIT_AND_EXPR
11567 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11568 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11569 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11571 /* (X & ~Y) | (~X & Y) is X ^ Y */
11572 if (TREE_CODE (arg0) == BIT_AND_EXPR
11573 && TREE_CODE (arg1) == BIT_AND_EXPR)
11575 tree a0, a1, l0, l1, n0, n1;
11577 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11578 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11580 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11581 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11583 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11584 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11586 if ((operand_equal_p (n0, a0, 0)
11587 && operand_equal_p (n1, a1, 0))
11588 || (operand_equal_p (n0, a1, 0)
11589 && operand_equal_p (n1, a0, 0)))
11590 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11593 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11594 if (t1 != NULL_TREE)
11595 return t1;
11597 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11599 This results in more efficient code for machines without a NAND
11600 instruction. Combine will canonicalize to the first form
11601 which will allow use of NAND instructions provided by the
11602 backend if they exist. */
11603 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11604 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11606 return
11607 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11608 build2 (BIT_AND_EXPR, type,
11609 fold_convert_loc (loc, type,
11610 TREE_OPERAND (arg0, 0)),
11611 fold_convert_loc (loc, type,
11612 TREE_OPERAND (arg1, 0))));
11615 /* See if this can be simplified into a rotate first. If that
11616 is unsuccessful continue in the association code. */
11617 goto bit_rotate;
11619 case BIT_XOR_EXPR:
11620 if (integer_zerop (arg1))
11621 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11622 if (integer_all_onesp (arg1))
11623 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11624 if (operand_equal_p (arg0, arg1, 0))
11625 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11627 /* ~X ^ X is -1. */
11628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11629 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11631 t1 = build_zero_cst (type);
11632 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11633 return omit_one_operand_loc (loc, type, t1, arg1);
11636 /* X ^ ~X is -1. */
11637 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11640 t1 = build_zero_cst (type);
11641 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11642 return omit_one_operand_loc (loc, type, t1, arg0);
11645 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11646 with a constant, and the two constants have no bits in common,
11647 we should treat this as a BIT_IOR_EXPR since this may produce more
11648 simplifications. */
11649 if (TREE_CODE (arg0) == BIT_AND_EXPR
11650 && TREE_CODE (arg1) == BIT_AND_EXPR
11651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11652 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11653 && integer_zerop (const_binop (BIT_AND_EXPR,
11654 TREE_OPERAND (arg0, 1),
11655 TREE_OPERAND (arg1, 1))))
11657 code = BIT_IOR_EXPR;
11658 goto bit_ior;
11661 /* (X | Y) ^ X -> Y & ~ X*/
11662 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11663 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11665 tree t2 = TREE_OPERAND (arg0, 1);
11666 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11667 arg1);
11668 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11669 fold_convert_loc (loc, type, t2),
11670 fold_convert_loc (loc, type, t1));
11671 return t1;
11674 /* (Y | X) ^ X -> Y & ~ X*/
11675 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11676 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11678 tree t2 = TREE_OPERAND (arg0, 0);
11679 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11680 arg1);
11681 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11682 fold_convert_loc (loc, type, t2),
11683 fold_convert_loc (loc, type, t1));
11684 return t1;
11687 /* X ^ (X | Y) -> Y & ~ X*/
11688 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11689 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11691 tree t2 = TREE_OPERAND (arg1, 1);
11692 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11693 arg0);
11694 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11695 fold_convert_loc (loc, type, t2),
11696 fold_convert_loc (loc, type, t1));
11697 return t1;
11700 /* X ^ (Y | X) -> Y & ~ X*/
11701 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11702 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11704 tree t2 = TREE_OPERAND (arg1, 0);
11705 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11706 arg0);
11707 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11708 fold_convert_loc (loc, type, t2),
11709 fold_convert_loc (loc, type, t1));
11710 return t1;
11713 /* Convert ~X ^ ~Y to X ^ Y. */
11714 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11715 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11716 return fold_build2_loc (loc, code, type,
11717 fold_convert_loc (loc, type,
11718 TREE_OPERAND (arg0, 0)),
11719 fold_convert_loc (loc, type,
11720 TREE_OPERAND (arg1, 0)));
11722 /* Convert ~X ^ C to X ^ ~C. */
11723 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11724 && TREE_CODE (arg1) == INTEGER_CST)
11725 return fold_build2_loc (loc, code, type,
11726 fold_convert_loc (loc, type,
11727 TREE_OPERAND (arg0, 0)),
11728 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11730 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11731 if (TREE_CODE (arg0) == BIT_AND_EXPR
11732 && integer_onep (TREE_OPERAND (arg0, 1))
11733 && integer_onep (arg1))
11734 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11735 build_zero_cst (TREE_TYPE (arg0)));
11737 /* Fold (X & Y) ^ Y as ~X & Y. */
11738 if (TREE_CODE (arg0) == BIT_AND_EXPR
11739 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11741 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11742 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11743 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11744 fold_convert_loc (loc, type, arg1));
11746 /* Fold (X & Y) ^ X as ~Y & X. */
11747 if (TREE_CODE (arg0) == BIT_AND_EXPR
11748 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11749 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11751 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11753 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11754 fold_convert_loc (loc, type, arg1));
11756 /* Fold X ^ (X & Y) as X & ~Y. */
11757 if (TREE_CODE (arg1) == BIT_AND_EXPR
11758 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11760 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11761 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11762 fold_convert_loc (loc, type, arg0),
11763 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11765 /* Fold X ^ (Y & X) as ~Y & X. */
11766 if (TREE_CODE (arg1) == BIT_AND_EXPR
11767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11768 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11770 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11771 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11772 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11773 fold_convert_loc (loc, type, arg0));
11776 /* See if this can be simplified into a rotate first. If that
11777 is unsuccessful continue in the association code. */
11778 goto bit_rotate;
11780 case BIT_AND_EXPR:
11781 if (integer_all_onesp (arg1))
11782 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11783 if (integer_zerop (arg1))
11784 return omit_one_operand_loc (loc, type, arg1, arg0);
11785 if (operand_equal_p (arg0, arg1, 0))
11786 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11788 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11789 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11790 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11791 || (TREE_CODE (arg0) == EQ_EXPR
11792 && integer_zerop (TREE_OPERAND (arg0, 1))))
11793 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11794 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11796 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11797 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11798 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11799 || (TREE_CODE (arg1) == EQ_EXPR
11800 && integer_zerop (TREE_OPERAND (arg1, 1))))
11801 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11804 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11805 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11806 && TREE_CODE (arg1) == INTEGER_CST
11807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11809 tree tmp1 = fold_convert_loc (loc, type, arg1);
11810 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11811 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11812 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11813 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11814 return
11815 fold_convert_loc (loc, type,
11816 fold_build2_loc (loc, BIT_IOR_EXPR,
11817 type, tmp2, tmp3));
11820 /* (X | Y) & Y is (X, Y). */
11821 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11822 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11823 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11824 /* (X | Y) & X is (Y, X). */
11825 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11826 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11827 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11828 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11829 /* X & (X | Y) is (Y, X). */
11830 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11831 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11832 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11833 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11834 /* X & (Y | X) is (Y, X). */
11835 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11837 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11838 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11840 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11841 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11842 && integer_onep (TREE_OPERAND (arg0, 1))
11843 && integer_onep (arg1))
11845 tree tem2;
11846 tem = TREE_OPERAND (arg0, 0);
11847 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11848 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11849 tem, tem2);
11850 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11851 build_zero_cst (TREE_TYPE (tem)));
11853 /* Fold ~X & 1 as (X & 1) == 0. */
11854 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11855 && integer_onep (arg1))
11857 tree tem2;
11858 tem = TREE_OPERAND (arg0, 0);
11859 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11860 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11861 tem, tem2);
11862 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11863 build_zero_cst (TREE_TYPE (tem)));
11865 /* Fold !X & 1 as X == 0. */
11866 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11867 && integer_onep (arg1))
11869 tem = TREE_OPERAND (arg0, 0);
11870 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11871 build_zero_cst (TREE_TYPE (tem)));
11874 /* Fold (X ^ Y) & Y as ~X & Y. */
11875 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11876 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11878 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11879 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11880 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11881 fold_convert_loc (loc, type, arg1));
11883 /* Fold (X ^ Y) & X as ~Y & X. */
11884 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11885 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11886 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11888 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11889 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11890 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11891 fold_convert_loc (loc, type, arg1));
11893 /* Fold X & (X ^ Y) as X & ~Y. */
11894 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11897 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11898 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11899 fold_convert_loc (loc, type, arg0),
11900 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11902 /* Fold X & (Y ^ X) as ~Y & X. */
11903 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11905 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11907 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11908 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11909 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11910 fold_convert_loc (loc, type, arg0));
11913 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11914 multiple of 1 << CST. */
11915 if (TREE_CODE (arg1) == INTEGER_CST)
11917 double_int cst1 = tree_to_double_int (arg1);
11918 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11919 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11920 if ((cst1 & ncst1) == ncst1
11921 && multiple_of_p (type, arg0,
11922 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11923 return fold_convert_loc (loc, type, arg0);
11926 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11927 bits from CST2. */
11928 if (TREE_CODE (arg1) == INTEGER_CST
11929 && TREE_CODE (arg0) == MULT_EXPR
11930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11932 double_int darg1 = tree_to_double_int (arg1);
11933 double_int masked
11934 = mask_with_tz (type, darg1,
11935 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11937 if (masked.is_zero ())
11938 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11939 arg0, arg1);
11940 else if (masked != darg1)
11942 /* Avoid the transform if arg1 is a mask of some
11943 mode which allows further optimizations. */
11944 int pop = darg1.popcount ();
11945 if (!(pop >= BITS_PER_UNIT
11946 && exact_log2 (pop) != -1
11947 && double_int::mask (pop) == darg1))
11948 return fold_build2_loc (loc, code, type, op0,
11949 double_int_to_tree (type, masked));
11953 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11954 ((A & N) + B) & M -> (A + B) & M
11955 Similarly if (N & M) == 0,
11956 ((A | N) + B) & M -> (A + B) & M
11957 and for - instead of + (or unary - instead of +)
11958 and/or ^ instead of |.
11959 If B is constant and (B & M) == 0, fold into A & M. */
11960 if (tree_fits_uhwi_p (arg1))
11962 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11963 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11964 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11965 && (TREE_CODE (arg0) == PLUS_EXPR
11966 || TREE_CODE (arg0) == MINUS_EXPR
11967 || TREE_CODE (arg0) == NEGATE_EXPR)
11968 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11969 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11971 tree pmop[2];
11972 int which = 0;
11973 unsigned HOST_WIDE_INT cst0;
11975 /* Now we know that arg0 is (C + D) or (C - D) or
11976 -C and arg1 (M) is == (1LL << cst) - 1.
11977 Store C into PMOP[0] and D into PMOP[1]. */
11978 pmop[0] = TREE_OPERAND (arg0, 0);
11979 pmop[1] = NULL;
11980 if (TREE_CODE (arg0) != NEGATE_EXPR)
11982 pmop[1] = TREE_OPERAND (arg0, 1);
11983 which = 1;
11986 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11987 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11988 & cst1) != cst1)
11989 which = -1;
11991 for (; which >= 0; which--)
11992 switch (TREE_CODE (pmop[which]))
11994 case BIT_AND_EXPR:
11995 case BIT_IOR_EXPR:
11996 case BIT_XOR_EXPR:
11997 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11998 != INTEGER_CST)
11999 break;
12000 /* tree_to_[su]hwi not used, because we don't care about
12001 the upper bits. */
12002 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
12003 cst0 &= cst1;
12004 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
12006 if (cst0 != cst1)
12007 break;
12009 else if (cst0 != 0)
12010 break;
12011 /* If C or D is of the form (A & N) where
12012 (N & M) == M, or of the form (A | N) or
12013 (A ^ N) where (N & M) == 0, replace it with A. */
12014 pmop[which] = TREE_OPERAND (pmop[which], 0);
12015 break;
12016 case INTEGER_CST:
12017 /* If C or D is a N where (N & M) == 0, it can be
12018 omitted (assumed 0). */
12019 if ((TREE_CODE (arg0) == PLUS_EXPR
12020 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
12021 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
12022 pmop[which] = NULL;
12023 break;
12024 default:
12025 break;
12028 /* Only build anything new if we optimized one or both arguments
12029 above. */
12030 if (pmop[0] != TREE_OPERAND (arg0, 0)
12031 || (TREE_CODE (arg0) != NEGATE_EXPR
12032 && pmop[1] != TREE_OPERAND (arg0, 1)))
12034 tree utype = TREE_TYPE (arg0);
12035 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
12037 /* Perform the operations in a type that has defined
12038 overflow behavior. */
12039 utype = unsigned_type_for (TREE_TYPE (arg0));
12040 if (pmop[0] != NULL)
12041 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
12042 if (pmop[1] != NULL)
12043 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
12046 if (TREE_CODE (arg0) == NEGATE_EXPR)
12047 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
12048 else if (TREE_CODE (arg0) == PLUS_EXPR)
12050 if (pmop[0] != NULL && pmop[1] != NULL)
12051 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
12052 pmop[0], pmop[1]);
12053 else if (pmop[0] != NULL)
12054 tem = pmop[0];
12055 else if (pmop[1] != NULL)
12056 tem = pmop[1];
12057 else
12058 return build_int_cst (type, 0);
12060 else if (pmop[0] == NULL)
12061 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
12062 else
12063 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
12064 pmop[0], pmop[1]);
12065 /* TEM is now the new binary +, - or unary - replacement. */
12066 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
12067 fold_convert_loc (loc, utype, arg1));
12068 return fold_convert_loc (loc, type, tem);
12073 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12074 if (t1 != NULL_TREE)
12075 return t1;
12076 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12077 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12078 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12080 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12082 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12083 && (~TREE_INT_CST_LOW (arg1)
12084 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12085 return
12086 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12089 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12091 This results in more efficient code for machines without a NOR
12092 instruction. Combine will canonicalize to the first form
12093 which will allow use of NOR instructions provided by the
12094 backend if they exist. */
12095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12096 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12098 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12099 build2 (BIT_IOR_EXPR, type,
12100 fold_convert_loc (loc, type,
12101 TREE_OPERAND (arg0, 0)),
12102 fold_convert_loc (loc, type,
12103 TREE_OPERAND (arg1, 0))));
12106 /* If arg0 is derived from the address of an object or function, we may
12107 be able to fold this expression using the object or function's
12108 alignment. */
12109 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12111 unsigned HOST_WIDE_INT modulus, residue;
12112 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12114 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12115 integer_onep (arg1));
12117 /* This works because modulus is a power of 2. If this weren't the
12118 case, we'd have to replace it by its greatest power-of-2
12119 divisor: modulus & -modulus. */
12120 if (low < modulus)
12121 return build_int_cst (type, residue & low);
12124 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12125 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12126 if the new mask might be further optimized. */
12127 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12128 || TREE_CODE (arg0) == RSHIFT_EXPR)
12129 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12130 && TREE_CODE (arg1) == INTEGER_CST
12131 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12132 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12133 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12134 < TYPE_PRECISION (TREE_TYPE (arg0))))
12136 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12137 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12138 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12139 tree shift_type = TREE_TYPE (arg0);
12141 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12142 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12143 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12144 && TYPE_PRECISION (TREE_TYPE (arg0))
12145 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12147 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12148 tree arg00 = TREE_OPERAND (arg0, 0);
12149 /* See if more bits can be proven as zero because of
12150 zero extension. */
12151 if (TREE_CODE (arg00) == NOP_EXPR
12152 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12154 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12155 if (TYPE_PRECISION (inner_type)
12156 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12157 && TYPE_PRECISION (inner_type) < prec)
12159 prec = TYPE_PRECISION (inner_type);
12160 /* See if we can shorten the right shift. */
12161 if (shiftc < prec)
12162 shift_type = inner_type;
12163 /* Otherwise X >> C1 is all zeros, so we'll optimize
12164 it into (X, 0) later on by making sure zerobits
12165 is all ones. */
12168 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12169 if (shiftc < prec)
12171 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12172 zerobits <<= prec - shiftc;
12174 /* For arithmetic shift if sign bit could be set, zerobits
12175 can contain actually sign bits, so no transformation is
12176 possible, unless MASK masks them all away. In that
12177 case the shift needs to be converted into logical shift. */
12178 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12179 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12181 if ((mask & zerobits) == 0)
12182 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12183 else
12184 zerobits = 0;
12188 /* ((X << 16) & 0xff00) is (X, 0). */
12189 if ((mask & zerobits) == mask)
12190 return omit_one_operand_loc (loc, type,
12191 build_int_cst (type, 0), arg0);
12193 newmask = mask | zerobits;
12194 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12196 /* Only do the transformation if NEWMASK is some integer
12197 mode's mask. */
12198 for (prec = BITS_PER_UNIT;
12199 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12200 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12201 break;
12202 if (prec < HOST_BITS_PER_WIDE_INT
12203 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12205 tree newmaskt;
12207 if (shift_type != TREE_TYPE (arg0))
12209 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12210 fold_convert_loc (loc, shift_type,
12211 TREE_OPERAND (arg0, 0)),
12212 TREE_OPERAND (arg0, 1));
12213 tem = fold_convert_loc (loc, type, tem);
12215 else
12216 tem = op0;
12217 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12218 if (!tree_int_cst_equal (newmaskt, arg1))
12219 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12224 goto associate;
12226 case RDIV_EXPR:
12227 /* Don't touch a floating-point divide by zero unless the mode
12228 of the constant can represent infinity. */
12229 if (TREE_CODE (arg1) == REAL_CST
12230 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12231 && real_zerop (arg1))
12232 return NULL_TREE;
12234 /* Optimize A / A to 1.0 if we don't care about
12235 NaNs or Infinities. Skip the transformation
12236 for non-real operands. */
12237 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12238 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12239 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12240 && operand_equal_p (arg0, arg1, 0))
12242 tree r = build_real (TREE_TYPE (arg0), dconst1);
12244 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12247 /* The complex version of the above A / A optimization. */
12248 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12249 && operand_equal_p (arg0, arg1, 0))
12251 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12252 if (! HONOR_NANS (TYPE_MODE (elem_type))
12253 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12255 tree r = build_real (elem_type, dconst1);
12256 /* omit_two_operands will call fold_convert for us. */
12257 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12261 /* (-A) / (-B) -> A / B */
12262 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12263 return fold_build2_loc (loc, RDIV_EXPR, type,
12264 TREE_OPERAND (arg0, 0),
12265 negate_expr (arg1));
12266 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12267 return fold_build2_loc (loc, RDIV_EXPR, type,
12268 negate_expr (arg0),
12269 TREE_OPERAND (arg1, 0));
12271 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12272 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12273 && real_onep (arg1))
12274 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12276 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12277 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12278 && real_minus_onep (arg1))
12279 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12280 negate_expr (arg0)));
12282 /* If ARG1 is a constant, we can convert this to a multiply by the
12283 reciprocal. This does not have the same rounding properties,
12284 so only do this if -freciprocal-math. We can actually
12285 always safely do it if ARG1 is a power of two, but it's hard to
12286 tell if it is or not in a portable manner. */
12287 if (optimize
12288 && (TREE_CODE (arg1) == REAL_CST
12289 || (TREE_CODE (arg1) == COMPLEX_CST
12290 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12291 || (TREE_CODE (arg1) == VECTOR_CST
12292 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12294 if (flag_reciprocal_math
12295 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12296 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12297 /* Find the reciprocal if optimizing and the result is exact.
12298 TODO: Complex reciprocal not implemented. */
12299 if (TREE_CODE (arg1) != COMPLEX_CST)
12301 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12303 if (inverse)
12304 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12307 /* Convert A/B/C to A/(B*C). */
12308 if (flag_reciprocal_math
12309 && TREE_CODE (arg0) == RDIV_EXPR)
12310 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12311 fold_build2_loc (loc, MULT_EXPR, type,
12312 TREE_OPERAND (arg0, 1), arg1));
12314 /* Convert A/(B/C) to (A/B)*C. */
12315 if (flag_reciprocal_math
12316 && TREE_CODE (arg1) == RDIV_EXPR)
12317 return fold_build2_loc (loc, MULT_EXPR, type,
12318 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12319 TREE_OPERAND (arg1, 0)),
12320 TREE_OPERAND (arg1, 1));
12322 /* Convert C1/(X*C2) into (C1/C2)/X. */
12323 if (flag_reciprocal_math
12324 && TREE_CODE (arg1) == MULT_EXPR
12325 && TREE_CODE (arg0) == REAL_CST
12326 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12328 tree tem = const_binop (RDIV_EXPR, arg0,
12329 TREE_OPERAND (arg1, 1));
12330 if (tem)
12331 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12332 TREE_OPERAND (arg1, 0));
12335 if (flag_unsafe_math_optimizations)
12337 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12338 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12340 /* Optimize sin(x)/cos(x) as tan(x). */
12341 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12342 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12343 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12344 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12345 CALL_EXPR_ARG (arg1, 0), 0))
12347 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12349 if (tanfn != NULL_TREE)
12350 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12353 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12354 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12355 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12356 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12357 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12358 CALL_EXPR_ARG (arg1, 0), 0))
12360 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12362 if (tanfn != NULL_TREE)
12364 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12365 CALL_EXPR_ARG (arg0, 0));
12366 return fold_build2_loc (loc, RDIV_EXPR, type,
12367 build_real (type, dconst1), tmp);
12371 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12372 NaNs or Infinities. */
12373 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12374 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12375 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12377 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12378 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12380 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12381 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12382 && operand_equal_p (arg00, arg01, 0))
12384 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12386 if (cosfn != NULL_TREE)
12387 return build_call_expr_loc (loc, cosfn, 1, arg00);
12391 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12392 NaNs or Infinities. */
12393 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12394 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12395 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12397 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12398 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12400 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12401 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12402 && operand_equal_p (arg00, arg01, 0))
12404 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12406 if (cosfn != NULL_TREE)
12408 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12409 return fold_build2_loc (loc, RDIV_EXPR, type,
12410 build_real (type, dconst1),
12411 tmp);
12416 /* Optimize pow(x,c)/x as pow(x,c-1). */
12417 if (fcode0 == BUILT_IN_POW
12418 || fcode0 == BUILT_IN_POWF
12419 || fcode0 == BUILT_IN_POWL)
12421 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12422 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12423 if (TREE_CODE (arg01) == REAL_CST
12424 && !TREE_OVERFLOW (arg01)
12425 && operand_equal_p (arg1, arg00, 0))
12427 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12428 REAL_VALUE_TYPE c;
12429 tree arg;
12431 c = TREE_REAL_CST (arg01);
12432 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12433 arg = build_real (type, c);
12434 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12438 /* Optimize a/root(b/c) into a*root(c/b). */
12439 if (BUILTIN_ROOT_P (fcode1))
12441 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12443 if (TREE_CODE (rootarg) == RDIV_EXPR)
12445 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12446 tree b = TREE_OPERAND (rootarg, 0);
12447 tree c = TREE_OPERAND (rootarg, 1);
12449 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12451 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12452 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12456 /* Optimize x/expN(y) into x*expN(-y). */
12457 if (BUILTIN_EXPONENT_P (fcode1))
12459 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12460 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12461 arg1 = build_call_expr_loc (loc,
12462 expfn, 1,
12463 fold_convert_loc (loc, type, arg));
12464 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12467 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12468 if (fcode1 == BUILT_IN_POW
12469 || fcode1 == BUILT_IN_POWF
12470 || fcode1 == BUILT_IN_POWL)
12472 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12473 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12474 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12475 tree neg11 = fold_convert_loc (loc, type,
12476 negate_expr (arg11));
12477 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12478 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12481 return NULL_TREE;
12483 case TRUNC_DIV_EXPR:
12484 /* Optimize (X & (-A)) / A where A is a power of 2,
12485 to X >> log2(A) */
12486 if (TREE_CODE (arg0) == BIT_AND_EXPR
12487 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12488 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12490 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12491 arg1, TREE_OPERAND (arg0, 1));
12492 if (sum && integer_zerop (sum)) {
12493 unsigned long pow2;
12495 if (TREE_INT_CST_LOW (arg1))
12496 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12497 else
12498 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12499 + HOST_BITS_PER_WIDE_INT;
12501 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12502 TREE_OPERAND (arg0, 0),
12503 build_int_cst (integer_type_node, pow2));
12507 /* Fall through */
12509 case FLOOR_DIV_EXPR:
12510 /* Simplify A / (B << N) where A and B are positive and B is
12511 a power of 2, to A >> (N + log2(B)). */
12512 strict_overflow_p = false;
12513 if (TREE_CODE (arg1) == LSHIFT_EXPR
12514 && (TYPE_UNSIGNED (type)
12515 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12517 tree sval = TREE_OPERAND (arg1, 0);
12518 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12520 tree sh_cnt = TREE_OPERAND (arg1, 1);
12521 unsigned long pow2;
12523 if (TREE_INT_CST_LOW (sval))
12524 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12525 else
12526 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12527 + HOST_BITS_PER_WIDE_INT;
12529 if (strict_overflow_p)
12530 fold_overflow_warning (("assuming signed overflow does not "
12531 "occur when simplifying A / (B << N)"),
12532 WARN_STRICT_OVERFLOW_MISC);
12534 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12535 sh_cnt,
12536 build_int_cst (TREE_TYPE (sh_cnt),
12537 pow2));
12538 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12539 fold_convert_loc (loc, type, arg0), sh_cnt);
12543 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12544 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12545 if (INTEGRAL_TYPE_P (type)
12546 && TYPE_UNSIGNED (type)
12547 && code == FLOOR_DIV_EXPR)
12548 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12550 /* Fall through */
12552 case ROUND_DIV_EXPR:
12553 case CEIL_DIV_EXPR:
12554 case EXACT_DIV_EXPR:
12555 if (integer_onep (arg1))
12556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12557 if (integer_zerop (arg1))
12558 return NULL_TREE;
12559 /* X / -1 is -X. */
12560 if (!TYPE_UNSIGNED (type)
12561 && TREE_CODE (arg1) == INTEGER_CST
12562 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12563 && TREE_INT_CST_HIGH (arg1) == -1)
12564 return fold_convert_loc (loc, type, negate_expr (arg0));
12566 /* Convert -A / -B to A / B when the type is signed and overflow is
12567 undefined. */
12568 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12569 && TREE_CODE (arg0) == NEGATE_EXPR
12570 && negate_expr_p (arg1))
12572 if (INTEGRAL_TYPE_P (type))
12573 fold_overflow_warning (("assuming signed overflow does not occur "
12574 "when distributing negation across "
12575 "division"),
12576 WARN_STRICT_OVERFLOW_MISC);
12577 return fold_build2_loc (loc, code, type,
12578 fold_convert_loc (loc, type,
12579 TREE_OPERAND (arg0, 0)),
12580 fold_convert_loc (loc, type,
12581 negate_expr (arg1)));
12583 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12584 && TREE_CODE (arg1) == NEGATE_EXPR
12585 && negate_expr_p (arg0))
12587 if (INTEGRAL_TYPE_P (type))
12588 fold_overflow_warning (("assuming signed overflow does not occur "
12589 "when distributing negation across "
12590 "division"),
12591 WARN_STRICT_OVERFLOW_MISC);
12592 return fold_build2_loc (loc, code, type,
12593 fold_convert_loc (loc, type,
12594 negate_expr (arg0)),
12595 fold_convert_loc (loc, type,
12596 TREE_OPERAND (arg1, 0)));
12599 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12600 operation, EXACT_DIV_EXPR.
12602 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12603 At one time others generated faster code, it's not clear if they do
12604 after the last round to changes to the DIV code in expmed.c. */
12605 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12606 && multiple_of_p (type, arg0, arg1))
12607 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12609 strict_overflow_p = false;
12610 if (TREE_CODE (arg1) == INTEGER_CST
12611 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12612 &strict_overflow_p)))
12614 if (strict_overflow_p)
12615 fold_overflow_warning (("assuming signed overflow does not occur "
12616 "when simplifying division"),
12617 WARN_STRICT_OVERFLOW_MISC);
12618 return fold_convert_loc (loc, type, tem);
12621 return NULL_TREE;
12623 case CEIL_MOD_EXPR:
12624 case FLOOR_MOD_EXPR:
12625 case ROUND_MOD_EXPR:
12626 case TRUNC_MOD_EXPR:
12627 /* X % 1 is always zero, but be sure to preserve any side
12628 effects in X. */
12629 if (integer_onep (arg1))
12630 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12632 /* X % 0, return X % 0 unchanged so that we can get the
12633 proper warnings and errors. */
12634 if (integer_zerop (arg1))
12635 return NULL_TREE;
12637 /* 0 % X is always zero, but be sure to preserve any side
12638 effects in X. Place this after checking for X == 0. */
12639 if (integer_zerop (arg0))
12640 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12642 /* X % -1 is zero. */
12643 if (!TYPE_UNSIGNED (type)
12644 && TREE_CODE (arg1) == INTEGER_CST
12645 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12646 && TREE_INT_CST_HIGH (arg1) == -1)
12647 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12649 /* X % -C is the same as X % C. */
12650 if (code == TRUNC_MOD_EXPR
12651 && !TYPE_UNSIGNED (type)
12652 && TREE_CODE (arg1) == INTEGER_CST
12653 && !TREE_OVERFLOW (arg1)
12654 && TREE_INT_CST_HIGH (arg1) < 0
12655 && !TYPE_OVERFLOW_TRAPS (type)
12656 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12657 && !sign_bit_p (arg1, arg1))
12658 return fold_build2_loc (loc, code, type,
12659 fold_convert_loc (loc, type, arg0),
12660 fold_convert_loc (loc, type,
12661 negate_expr (arg1)));
12663 /* X % -Y is the same as X % Y. */
12664 if (code == TRUNC_MOD_EXPR
12665 && !TYPE_UNSIGNED (type)
12666 && TREE_CODE (arg1) == NEGATE_EXPR
12667 && !TYPE_OVERFLOW_TRAPS (type))
12668 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12669 fold_convert_loc (loc, type,
12670 TREE_OPERAND (arg1, 0)));
12672 strict_overflow_p = false;
12673 if (TREE_CODE (arg1) == INTEGER_CST
12674 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12675 &strict_overflow_p)))
12677 if (strict_overflow_p)
12678 fold_overflow_warning (("assuming signed overflow does not occur "
12679 "when simplifying modulus"),
12680 WARN_STRICT_OVERFLOW_MISC);
12681 return fold_convert_loc (loc, type, tem);
12684 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12685 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12686 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12687 && (TYPE_UNSIGNED (type)
12688 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12690 tree c = arg1;
12691 /* Also optimize A % (C << N) where C is a power of 2,
12692 to A & ((C << N) - 1). */
12693 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12694 c = TREE_OPERAND (arg1, 0);
12696 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12698 tree mask
12699 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12700 build_int_cst (TREE_TYPE (arg1), 1));
12701 if (strict_overflow_p)
12702 fold_overflow_warning (("assuming signed overflow does not "
12703 "occur when simplifying "
12704 "X % (power of two)"),
12705 WARN_STRICT_OVERFLOW_MISC);
12706 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12707 fold_convert_loc (loc, type, arg0),
12708 fold_convert_loc (loc, type, mask));
12712 return NULL_TREE;
12714 case LROTATE_EXPR:
12715 case RROTATE_EXPR:
12716 if (integer_all_onesp (arg0))
12717 return omit_one_operand_loc (loc, type, arg0, arg1);
12718 goto shift;
12720 case RSHIFT_EXPR:
12721 /* Optimize -1 >> x for arithmetic right shifts. */
12722 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12723 && tree_expr_nonnegative_p (arg1))
12724 return omit_one_operand_loc (loc, type, arg0, arg1);
12725 /* ... fall through ... */
12727 case LSHIFT_EXPR:
12728 shift:
12729 if (integer_zerop (arg1))
12730 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12731 if (integer_zerop (arg0))
12732 return omit_one_operand_loc (loc, type, arg0, arg1);
12734 /* Prefer vector1 << scalar to vector1 << vector2
12735 if vector2 is uniform. */
12736 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12737 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12738 return fold_build2_loc (loc, code, type, op0, tem);
12740 /* Since negative shift count is not well-defined,
12741 don't try to compute it in the compiler. */
12742 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12743 return NULL_TREE;
12745 prec = element_precision (type);
12747 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12748 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12749 && tree_to_uhwi (arg1) < prec
12750 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12751 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12753 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12754 + tree_to_uhwi (arg1));
12756 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12757 being well defined. */
12758 if (low >= prec)
12760 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12761 low = low % prec;
12762 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12763 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12764 TREE_OPERAND (arg0, 0));
12765 else
12766 low = prec - 1;
12769 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12770 build_int_cst (TREE_TYPE (arg1), low));
12773 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12774 into x & ((unsigned)-1 >> c) for unsigned types. */
12775 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12776 || (TYPE_UNSIGNED (type)
12777 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12778 && tree_fits_uhwi_p (arg1)
12779 && tree_to_uhwi (arg1) < prec
12780 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12781 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12783 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12784 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12785 tree lshift;
12786 tree arg00;
12788 if (low0 == low1)
12790 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12792 lshift = build_minus_one_cst (type);
12793 lshift = const_binop (code, lshift, arg1);
12795 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12799 /* Rewrite an LROTATE_EXPR by a constant into an
12800 RROTATE_EXPR by a new constant. */
12801 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12803 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12804 tem = const_binop (MINUS_EXPR, tem, arg1);
12805 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12808 /* If we have a rotate of a bit operation with the rotate count and
12809 the second operand of the bit operation both constant,
12810 permute the two operations. */
12811 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12812 && (TREE_CODE (arg0) == BIT_AND_EXPR
12813 || TREE_CODE (arg0) == BIT_IOR_EXPR
12814 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12815 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12816 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12817 fold_build2_loc (loc, code, type,
12818 TREE_OPERAND (arg0, 0), arg1),
12819 fold_build2_loc (loc, code, type,
12820 TREE_OPERAND (arg0, 1), arg1));
12822 /* Two consecutive rotates adding up to the precision of the
12823 type can be ignored. */
12824 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12825 && TREE_CODE (arg0) == RROTATE_EXPR
12826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12827 && TREE_INT_CST_HIGH (arg1) == 0
12828 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12829 && ((TREE_INT_CST_LOW (arg1)
12830 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12831 == prec))
12832 return TREE_OPERAND (arg0, 0);
12834 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12835 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12836 if the latter can be further optimized. */
12837 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12838 && TREE_CODE (arg0) == BIT_AND_EXPR
12839 && TREE_CODE (arg1) == INTEGER_CST
12840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12842 tree mask = fold_build2_loc (loc, code, type,
12843 fold_convert_loc (loc, type,
12844 TREE_OPERAND (arg0, 1)),
12845 arg1);
12846 tree shift = fold_build2_loc (loc, code, type,
12847 fold_convert_loc (loc, type,
12848 TREE_OPERAND (arg0, 0)),
12849 arg1);
12850 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12851 if (tem)
12852 return tem;
12855 return NULL_TREE;
12857 case MIN_EXPR:
12858 if (operand_equal_p (arg0, arg1, 0))
12859 return omit_one_operand_loc (loc, type, arg0, arg1);
12860 if (INTEGRAL_TYPE_P (type)
12861 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12862 return omit_one_operand_loc (loc, type, arg1, arg0);
12863 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12864 if (tem)
12865 return tem;
12866 goto associate;
12868 case MAX_EXPR:
12869 if (operand_equal_p (arg0, arg1, 0))
12870 return omit_one_operand_loc (loc, type, arg0, arg1);
12871 if (INTEGRAL_TYPE_P (type)
12872 && TYPE_MAX_VALUE (type)
12873 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12874 return omit_one_operand_loc (loc, type, arg1, arg0);
12875 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12876 if (tem)
12877 return tem;
12878 goto associate;
12880 case TRUTH_ANDIF_EXPR:
12881 /* Note that the operands of this must be ints
12882 and their values must be 0 or 1.
12883 ("true" is a fixed value perhaps depending on the language.) */
12884 /* If first arg is constant zero, return it. */
12885 if (integer_zerop (arg0))
12886 return fold_convert_loc (loc, type, arg0);
12887 case TRUTH_AND_EXPR:
12888 /* If either arg is constant true, drop it. */
12889 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12891 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12892 /* Preserve sequence points. */
12893 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12894 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12895 /* If second arg is constant zero, result is zero, but first arg
12896 must be evaluated. */
12897 if (integer_zerop (arg1))
12898 return omit_one_operand_loc (loc, type, arg1, arg0);
12899 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12900 case will be handled here. */
12901 if (integer_zerop (arg0))
12902 return omit_one_operand_loc (loc, type, arg0, arg1);
12904 /* !X && X is always false. */
12905 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12906 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12907 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12908 /* X && !X is always false. */
12909 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12910 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12911 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12913 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12914 means A >= Y && A != MAX, but in this case we know that
12915 A < X <= MAX. */
12917 if (!TREE_SIDE_EFFECTS (arg0)
12918 && !TREE_SIDE_EFFECTS (arg1))
12920 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12921 if (tem && !operand_equal_p (tem, arg0, 0))
12922 return fold_build2_loc (loc, code, type, tem, arg1);
12924 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12925 if (tem && !operand_equal_p (tem, arg1, 0))
12926 return fold_build2_loc (loc, code, type, arg0, tem);
12929 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12930 != NULL_TREE)
12931 return tem;
12933 return NULL_TREE;
12935 case TRUTH_ORIF_EXPR:
12936 /* Note that the operands of this must be ints
12937 and their values must be 0 or true.
12938 ("true" is a fixed value perhaps depending on the language.) */
12939 /* If first arg is constant true, return it. */
12940 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12941 return fold_convert_loc (loc, type, arg0);
12942 case TRUTH_OR_EXPR:
12943 /* If either arg is constant zero, drop it. */
12944 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12945 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12946 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12947 /* Preserve sequence points. */
12948 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12949 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12950 /* If second arg is constant true, result is true, but we must
12951 evaluate first arg. */
12952 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12953 return omit_one_operand_loc (loc, type, arg1, arg0);
12954 /* Likewise for first arg, but note this only occurs here for
12955 TRUTH_OR_EXPR. */
12956 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12957 return omit_one_operand_loc (loc, type, arg0, arg1);
12959 /* !X || X is always true. */
12960 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12962 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12963 /* X || !X is always true. */
12964 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12965 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12966 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12968 /* (X && !Y) || (!X && Y) is X ^ Y */
12969 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12970 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12972 tree a0, a1, l0, l1, n0, n1;
12974 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12975 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12977 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12978 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12980 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12981 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12983 if ((operand_equal_p (n0, a0, 0)
12984 && operand_equal_p (n1, a1, 0))
12985 || (operand_equal_p (n0, a1, 0)
12986 && operand_equal_p (n1, a0, 0)))
12987 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12990 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12991 != NULL_TREE)
12992 return tem;
12994 return NULL_TREE;
12996 case TRUTH_XOR_EXPR:
12997 /* If the second arg is constant zero, drop it. */
12998 if (integer_zerop (arg1))
12999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13000 /* If the second arg is constant true, this is a logical inversion. */
13001 if (integer_onep (arg1))
13003 tem = invert_truthvalue_loc (loc, arg0);
13004 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
13006 /* Identical arguments cancel to zero. */
13007 if (operand_equal_p (arg0, arg1, 0))
13008 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13010 /* !X ^ X is always true. */
13011 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
13012 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
13013 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
13015 /* X ^ !X is always true. */
13016 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
13017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
13018 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13020 return NULL_TREE;
13022 case EQ_EXPR:
13023 case NE_EXPR:
13024 STRIP_NOPS (arg0);
13025 STRIP_NOPS (arg1);
13027 tem = fold_comparison (loc, code, type, op0, op1);
13028 if (tem != NULL_TREE)
13029 return tem;
13031 /* bool_var != 0 becomes bool_var. */
13032 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13033 && code == NE_EXPR)
13034 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13036 /* bool_var == 1 becomes bool_var. */
13037 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13038 && code == EQ_EXPR)
13039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13041 /* bool_var != 1 becomes !bool_var. */
13042 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13043 && code == NE_EXPR)
13044 return fold_convert_loc (loc, type,
13045 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13046 TREE_TYPE (arg0), arg0));
13048 /* bool_var == 0 becomes !bool_var. */
13049 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13050 && code == EQ_EXPR)
13051 return fold_convert_loc (loc, type,
13052 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13053 TREE_TYPE (arg0), arg0));
13055 /* !exp != 0 becomes !exp */
13056 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
13057 && code == NE_EXPR)
13058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13060 /* If this is an equality comparison of the address of two non-weak,
13061 unaliased symbols neither of which are extern (since we do not
13062 have access to attributes for externs), then we know the result. */
13063 if (TREE_CODE (arg0) == ADDR_EXPR
13064 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
13065 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
13066 && ! lookup_attribute ("alias",
13067 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
13068 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
13069 && TREE_CODE (arg1) == ADDR_EXPR
13070 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13071 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13072 && ! lookup_attribute ("alias",
13073 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13074 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13076 /* We know that we're looking at the address of two
13077 non-weak, unaliased, static _DECL nodes.
13079 It is both wasteful and incorrect to call operand_equal_p
13080 to compare the two ADDR_EXPR nodes. It is wasteful in that
13081 all we need to do is test pointer equality for the arguments
13082 to the two ADDR_EXPR nodes. It is incorrect to use
13083 operand_equal_p as that function is NOT equivalent to a
13084 C equality test. It can in fact return false for two
13085 objects which would test as equal using the C equality
13086 operator. */
13087 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13088 return constant_boolean_node (equal
13089 ? code == EQ_EXPR : code != EQ_EXPR,
13090 type);
13093 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13094 a MINUS_EXPR of a constant, we can convert it into a comparison with
13095 a revised constant as long as no overflow occurs. */
13096 if (TREE_CODE (arg1) == INTEGER_CST
13097 && (TREE_CODE (arg0) == PLUS_EXPR
13098 || TREE_CODE (arg0) == MINUS_EXPR)
13099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13100 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13101 ? MINUS_EXPR : PLUS_EXPR,
13102 fold_convert_loc (loc, TREE_TYPE (arg0),
13103 arg1),
13104 TREE_OPERAND (arg0, 1)))
13105 && !TREE_OVERFLOW (tem))
13106 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13108 /* Similarly for a NEGATE_EXPR. */
13109 if (TREE_CODE (arg0) == NEGATE_EXPR
13110 && TREE_CODE (arg1) == INTEGER_CST
13111 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13112 arg1)))
13113 && TREE_CODE (tem) == INTEGER_CST
13114 && !TREE_OVERFLOW (tem))
13115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13117 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13119 && TREE_CODE (arg1) == INTEGER_CST
13120 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13121 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13122 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13123 fold_convert_loc (loc,
13124 TREE_TYPE (arg0),
13125 arg1),
13126 TREE_OPERAND (arg0, 1)));
13128 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13129 if ((TREE_CODE (arg0) == PLUS_EXPR
13130 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13131 || TREE_CODE (arg0) == MINUS_EXPR)
13132 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13133 0)),
13134 arg1, 0)
13135 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13136 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13138 tree val = TREE_OPERAND (arg0, 1);
13139 return omit_two_operands_loc (loc, type,
13140 fold_build2_loc (loc, code, type,
13141 val,
13142 build_int_cst (TREE_TYPE (val),
13143 0)),
13144 TREE_OPERAND (arg0, 0), arg1);
13147 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13148 if (TREE_CODE (arg0) == MINUS_EXPR
13149 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13150 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13151 1)),
13152 arg1, 0)
13153 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13155 return omit_two_operands_loc (loc, type,
13156 code == NE_EXPR
13157 ? boolean_true_node : boolean_false_node,
13158 TREE_OPERAND (arg0, 1), arg1);
13161 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13162 for !=. Don't do this for ordered comparisons due to overflow. */
13163 if (TREE_CODE (arg0) == MINUS_EXPR
13164 && integer_zerop (arg1))
13165 return fold_build2_loc (loc, code, type,
13166 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13168 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13169 if (TREE_CODE (arg0) == ABS_EXPR
13170 && (integer_zerop (arg1) || real_zerop (arg1)))
13171 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13173 /* If this is an EQ or NE comparison with zero and ARG0 is
13174 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13175 two operations, but the latter can be done in one less insn
13176 on machines that have only two-operand insns or on which a
13177 constant cannot be the first operand. */
13178 if (TREE_CODE (arg0) == BIT_AND_EXPR
13179 && integer_zerop (arg1))
13181 tree arg00 = TREE_OPERAND (arg0, 0);
13182 tree arg01 = TREE_OPERAND (arg0, 1);
13183 if (TREE_CODE (arg00) == LSHIFT_EXPR
13184 && integer_onep (TREE_OPERAND (arg00, 0)))
13186 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13187 arg01, TREE_OPERAND (arg00, 1));
13188 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13189 build_int_cst (TREE_TYPE (arg0), 1));
13190 return fold_build2_loc (loc, code, type,
13191 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13192 arg1);
13194 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13195 && integer_onep (TREE_OPERAND (arg01, 0)))
13197 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13198 arg00, TREE_OPERAND (arg01, 1));
13199 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13200 build_int_cst (TREE_TYPE (arg0), 1));
13201 return fold_build2_loc (loc, code, type,
13202 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13203 arg1);
13207 /* If this is an NE or EQ comparison of zero against the result of a
13208 signed MOD operation whose second operand is a power of 2, make
13209 the MOD operation unsigned since it is simpler and equivalent. */
13210 if (integer_zerop (arg1)
13211 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13212 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13213 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13214 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13215 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13216 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13218 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13219 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13220 fold_convert_loc (loc, newtype,
13221 TREE_OPERAND (arg0, 0)),
13222 fold_convert_loc (loc, newtype,
13223 TREE_OPERAND (arg0, 1)));
13225 return fold_build2_loc (loc, code, type, newmod,
13226 fold_convert_loc (loc, newtype, arg1));
13229 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13230 C1 is a valid shift constant, and C2 is a power of two, i.e.
13231 a single bit. */
13232 if (TREE_CODE (arg0) == BIT_AND_EXPR
13233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13234 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13235 == INTEGER_CST
13236 && integer_pow2p (TREE_OPERAND (arg0, 1))
13237 && integer_zerop (arg1))
13239 tree itype = TREE_TYPE (arg0);
13240 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13241 prec = TYPE_PRECISION (itype);
13243 /* Check for a valid shift count. */
13244 if (TREE_INT_CST_HIGH (arg001) == 0
13245 && TREE_INT_CST_LOW (arg001) < prec)
13247 tree arg01 = TREE_OPERAND (arg0, 1);
13248 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13249 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13250 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13251 can be rewritten as (X & (C2 << C1)) != 0. */
13252 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13254 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13255 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13256 return fold_build2_loc (loc, code, type, tem,
13257 fold_convert_loc (loc, itype, arg1));
13259 /* Otherwise, for signed (arithmetic) shifts,
13260 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13261 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13262 else if (!TYPE_UNSIGNED (itype))
13263 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13264 arg000, build_int_cst (itype, 0));
13265 /* Otherwise, of unsigned (logical) shifts,
13266 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13267 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13268 else
13269 return omit_one_operand_loc (loc, type,
13270 code == EQ_EXPR ? integer_one_node
13271 : integer_zero_node,
13272 arg000);
13276 /* If we have (A & C) == C where C is a power of 2, convert this into
13277 (A & C) != 0. Similarly for NE_EXPR. */
13278 if (TREE_CODE (arg0) == BIT_AND_EXPR
13279 && integer_pow2p (TREE_OPERAND (arg0, 1))
13280 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13281 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13282 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13283 integer_zero_node));
13285 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13286 bit, then fold the expression into A < 0 or A >= 0. */
13287 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13288 if (tem)
13289 return tem;
13291 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13292 Similarly for NE_EXPR. */
13293 if (TREE_CODE (arg0) == BIT_AND_EXPR
13294 && TREE_CODE (arg1) == INTEGER_CST
13295 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13297 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13298 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13299 TREE_OPERAND (arg0, 1));
13300 tree dandnotc
13301 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13302 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13303 notc);
13304 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13305 if (integer_nonzerop (dandnotc))
13306 return omit_one_operand_loc (loc, type, rslt, arg0);
13309 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13310 Similarly for NE_EXPR. */
13311 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13312 && TREE_CODE (arg1) == INTEGER_CST
13313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13315 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13316 tree candnotd
13317 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13318 TREE_OPERAND (arg0, 1),
13319 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13320 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13321 if (integer_nonzerop (candnotd))
13322 return omit_one_operand_loc (loc, type, rslt, arg0);
13325 /* If this is a comparison of a field, we may be able to simplify it. */
13326 if ((TREE_CODE (arg0) == COMPONENT_REF
13327 || TREE_CODE (arg0) == BIT_FIELD_REF)
13328 /* Handle the constant case even without -O
13329 to make sure the warnings are given. */
13330 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13332 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13333 if (t1)
13334 return t1;
13337 /* Optimize comparisons of strlen vs zero to a compare of the
13338 first character of the string vs zero. To wit,
13339 strlen(ptr) == 0 => *ptr == 0
13340 strlen(ptr) != 0 => *ptr != 0
13341 Other cases should reduce to one of these two (or a constant)
13342 due to the return value of strlen being unsigned. */
13343 if (TREE_CODE (arg0) == CALL_EXPR
13344 && integer_zerop (arg1))
13346 tree fndecl = get_callee_fndecl (arg0);
13348 if (fndecl
13349 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13350 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13351 && call_expr_nargs (arg0) == 1
13352 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13354 tree iref = build_fold_indirect_ref_loc (loc,
13355 CALL_EXPR_ARG (arg0, 0));
13356 return fold_build2_loc (loc, code, type, iref,
13357 build_int_cst (TREE_TYPE (iref), 0));
13361 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13362 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13363 if (TREE_CODE (arg0) == RSHIFT_EXPR
13364 && integer_zerop (arg1)
13365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13367 tree arg00 = TREE_OPERAND (arg0, 0);
13368 tree arg01 = TREE_OPERAND (arg0, 1);
13369 tree itype = TREE_TYPE (arg00);
13370 if (TREE_INT_CST_HIGH (arg01) == 0
13371 && TREE_INT_CST_LOW (arg01)
13372 == (unsigned HOST_WIDE_INT) (element_precision (itype) - 1))
13374 if (TYPE_UNSIGNED (itype))
13376 itype = signed_type_for (itype);
13377 arg00 = fold_convert_loc (loc, itype, arg00);
13379 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13380 type, arg00, build_zero_cst (itype));
13384 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13385 if (integer_zerop (arg1)
13386 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13387 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13388 TREE_OPERAND (arg0, 1));
13390 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13391 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13392 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13393 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13394 build_zero_cst (TREE_TYPE (arg0)));
13395 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13396 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13398 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13399 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13400 build_zero_cst (TREE_TYPE (arg0)));
13402 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13403 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13404 && TREE_CODE (arg1) == INTEGER_CST
13405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13406 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13407 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13408 TREE_OPERAND (arg0, 1), arg1));
13410 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13411 (X & C) == 0 when C is a single bit. */
13412 if (TREE_CODE (arg0) == BIT_AND_EXPR
13413 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13414 && integer_zerop (arg1)
13415 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13417 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13418 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13419 TREE_OPERAND (arg0, 1));
13420 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13421 type, tem,
13422 fold_convert_loc (loc, TREE_TYPE (arg0),
13423 arg1));
13426 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13427 constant C is a power of two, i.e. a single bit. */
13428 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13429 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13430 && integer_zerop (arg1)
13431 && integer_pow2p (TREE_OPERAND (arg0, 1))
13432 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13433 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13435 tree arg00 = TREE_OPERAND (arg0, 0);
13436 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13437 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13440 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13441 when is C is a power of two, i.e. a single bit. */
13442 if (TREE_CODE (arg0) == BIT_AND_EXPR
13443 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13444 && integer_zerop (arg1)
13445 && integer_pow2p (TREE_OPERAND (arg0, 1))
13446 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13447 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13449 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13450 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13451 arg000, TREE_OPERAND (arg0, 1));
13452 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13453 tem, build_int_cst (TREE_TYPE (tem), 0));
13456 if (integer_zerop (arg1)
13457 && tree_expr_nonzero_p (arg0))
13459 tree res = constant_boolean_node (code==NE_EXPR, type);
13460 return omit_one_operand_loc (loc, type, res, arg0);
13463 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13464 if (TREE_CODE (arg0) == NEGATE_EXPR
13465 && TREE_CODE (arg1) == NEGATE_EXPR)
13466 return fold_build2_loc (loc, code, type,
13467 TREE_OPERAND (arg0, 0),
13468 fold_convert_loc (loc, TREE_TYPE (arg0),
13469 TREE_OPERAND (arg1, 0)));
13471 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13472 if (TREE_CODE (arg0) == BIT_AND_EXPR
13473 && TREE_CODE (arg1) == BIT_AND_EXPR)
13475 tree arg00 = TREE_OPERAND (arg0, 0);
13476 tree arg01 = TREE_OPERAND (arg0, 1);
13477 tree arg10 = TREE_OPERAND (arg1, 0);
13478 tree arg11 = TREE_OPERAND (arg1, 1);
13479 tree itype = TREE_TYPE (arg0);
13481 if (operand_equal_p (arg01, arg11, 0))
13482 return fold_build2_loc (loc, code, type,
13483 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13484 fold_build2_loc (loc,
13485 BIT_XOR_EXPR, itype,
13486 arg00, arg10),
13487 arg01),
13488 build_zero_cst (itype));
13490 if (operand_equal_p (arg01, arg10, 0))
13491 return fold_build2_loc (loc, code, type,
13492 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13493 fold_build2_loc (loc,
13494 BIT_XOR_EXPR, itype,
13495 arg00, arg11),
13496 arg01),
13497 build_zero_cst (itype));
13499 if (operand_equal_p (arg00, arg11, 0))
13500 return fold_build2_loc (loc, code, type,
13501 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13502 fold_build2_loc (loc,
13503 BIT_XOR_EXPR, itype,
13504 arg01, arg10),
13505 arg00),
13506 build_zero_cst (itype));
13508 if (operand_equal_p (arg00, arg10, 0))
13509 return fold_build2_loc (loc, code, type,
13510 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13511 fold_build2_loc (loc,
13512 BIT_XOR_EXPR, itype,
13513 arg01, arg11),
13514 arg00),
13515 build_zero_cst (itype));
13518 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13519 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13521 tree arg00 = TREE_OPERAND (arg0, 0);
13522 tree arg01 = TREE_OPERAND (arg0, 1);
13523 tree arg10 = TREE_OPERAND (arg1, 0);
13524 tree arg11 = TREE_OPERAND (arg1, 1);
13525 tree itype = TREE_TYPE (arg0);
13527 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13528 operand_equal_p guarantees no side-effects so we don't need
13529 to use omit_one_operand on Z. */
13530 if (operand_equal_p (arg01, arg11, 0))
13531 return fold_build2_loc (loc, code, type, arg00,
13532 fold_convert_loc (loc, TREE_TYPE (arg00),
13533 arg10));
13534 if (operand_equal_p (arg01, arg10, 0))
13535 return fold_build2_loc (loc, code, type, arg00,
13536 fold_convert_loc (loc, TREE_TYPE (arg00),
13537 arg11));
13538 if (operand_equal_p (arg00, arg11, 0))
13539 return fold_build2_loc (loc, code, type, arg01,
13540 fold_convert_loc (loc, TREE_TYPE (arg01),
13541 arg10));
13542 if (operand_equal_p (arg00, arg10, 0))
13543 return fold_build2_loc (loc, code, type, arg01,
13544 fold_convert_loc (loc, TREE_TYPE (arg01),
13545 arg11));
13547 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13548 if (TREE_CODE (arg01) == INTEGER_CST
13549 && TREE_CODE (arg11) == INTEGER_CST)
13551 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13552 fold_convert_loc (loc, itype, arg11));
13553 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13554 return fold_build2_loc (loc, code, type, tem,
13555 fold_convert_loc (loc, itype, arg10));
13559 /* Attempt to simplify equality/inequality comparisons of complex
13560 values. Only lower the comparison if the result is known or
13561 can be simplified to a single scalar comparison. */
13562 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13563 || TREE_CODE (arg0) == COMPLEX_CST)
13564 && (TREE_CODE (arg1) == COMPLEX_EXPR
13565 || TREE_CODE (arg1) == COMPLEX_CST))
13567 tree real0, imag0, real1, imag1;
13568 tree rcond, icond;
13570 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13572 real0 = TREE_OPERAND (arg0, 0);
13573 imag0 = TREE_OPERAND (arg0, 1);
13575 else
13577 real0 = TREE_REALPART (arg0);
13578 imag0 = TREE_IMAGPART (arg0);
13581 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13583 real1 = TREE_OPERAND (arg1, 0);
13584 imag1 = TREE_OPERAND (arg1, 1);
13586 else
13588 real1 = TREE_REALPART (arg1);
13589 imag1 = TREE_IMAGPART (arg1);
13592 rcond = fold_binary_loc (loc, code, type, real0, real1);
13593 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13595 if (integer_zerop (rcond))
13597 if (code == EQ_EXPR)
13598 return omit_two_operands_loc (loc, type, boolean_false_node,
13599 imag0, imag1);
13600 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13602 else
13604 if (code == NE_EXPR)
13605 return omit_two_operands_loc (loc, type, boolean_true_node,
13606 imag0, imag1);
13607 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13611 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13612 if (icond && TREE_CODE (icond) == INTEGER_CST)
13614 if (integer_zerop (icond))
13616 if (code == EQ_EXPR)
13617 return omit_two_operands_loc (loc, type, boolean_false_node,
13618 real0, real1);
13619 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13621 else
13623 if (code == NE_EXPR)
13624 return omit_two_operands_loc (loc, type, boolean_true_node,
13625 real0, real1);
13626 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13631 return NULL_TREE;
13633 case LT_EXPR:
13634 case GT_EXPR:
13635 case LE_EXPR:
13636 case GE_EXPR:
13637 tem = fold_comparison (loc, code, type, op0, op1);
13638 if (tem != NULL_TREE)
13639 return tem;
13641 /* Transform comparisons of the form X +- C CMP X. */
13642 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13644 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13645 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13646 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13647 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13649 tree arg01 = TREE_OPERAND (arg0, 1);
13650 enum tree_code code0 = TREE_CODE (arg0);
13651 int is_positive;
13653 if (TREE_CODE (arg01) == REAL_CST)
13654 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13655 else
13656 is_positive = tree_int_cst_sgn (arg01);
13658 /* (X - c) > X becomes false. */
13659 if (code == GT_EXPR
13660 && ((code0 == MINUS_EXPR && is_positive >= 0)
13661 || (code0 == PLUS_EXPR && is_positive <= 0)))
13663 if (TREE_CODE (arg01) == INTEGER_CST
13664 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13665 fold_overflow_warning (("assuming signed overflow does not "
13666 "occur when assuming that (X - c) > X "
13667 "is always false"),
13668 WARN_STRICT_OVERFLOW_ALL);
13669 return constant_boolean_node (0, type);
13672 /* Likewise (X + c) < X becomes false. */
13673 if (code == LT_EXPR
13674 && ((code0 == PLUS_EXPR && is_positive >= 0)
13675 || (code0 == MINUS_EXPR && is_positive <= 0)))
13677 if (TREE_CODE (arg01) == INTEGER_CST
13678 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13679 fold_overflow_warning (("assuming signed overflow does not "
13680 "occur when assuming that "
13681 "(X + c) < X is always false"),
13682 WARN_STRICT_OVERFLOW_ALL);
13683 return constant_boolean_node (0, type);
13686 /* Convert (X - c) <= X to true. */
13687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13688 && code == LE_EXPR
13689 && ((code0 == MINUS_EXPR && is_positive >= 0)
13690 || (code0 == PLUS_EXPR && is_positive <= 0)))
13692 if (TREE_CODE (arg01) == INTEGER_CST
13693 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13694 fold_overflow_warning (("assuming signed overflow does not "
13695 "occur when assuming that "
13696 "(X - c) <= X is always true"),
13697 WARN_STRICT_OVERFLOW_ALL);
13698 return constant_boolean_node (1, type);
13701 /* Convert (X + c) >= X to true. */
13702 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13703 && code == GE_EXPR
13704 && ((code0 == PLUS_EXPR && is_positive >= 0)
13705 || (code0 == MINUS_EXPR && is_positive <= 0)))
13707 if (TREE_CODE (arg01) == INTEGER_CST
13708 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13709 fold_overflow_warning (("assuming signed overflow does not "
13710 "occur when assuming that "
13711 "(X + c) >= X is always true"),
13712 WARN_STRICT_OVERFLOW_ALL);
13713 return constant_boolean_node (1, type);
13716 if (TREE_CODE (arg01) == INTEGER_CST)
13718 /* Convert X + c > X and X - c < X to true for integers. */
13719 if (code == GT_EXPR
13720 && ((code0 == PLUS_EXPR && is_positive > 0)
13721 || (code0 == MINUS_EXPR && is_positive < 0)))
13723 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13724 fold_overflow_warning (("assuming signed overflow does "
13725 "not occur when assuming that "
13726 "(X + c) > X is always true"),
13727 WARN_STRICT_OVERFLOW_ALL);
13728 return constant_boolean_node (1, type);
13731 if (code == LT_EXPR
13732 && ((code0 == MINUS_EXPR && is_positive > 0)
13733 || (code0 == PLUS_EXPR && is_positive < 0)))
13735 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13736 fold_overflow_warning (("assuming signed overflow does "
13737 "not occur when assuming that "
13738 "(X - c) < X is always true"),
13739 WARN_STRICT_OVERFLOW_ALL);
13740 return constant_boolean_node (1, type);
13743 /* Convert X + c <= X and X - c >= X to false for integers. */
13744 if (code == LE_EXPR
13745 && ((code0 == PLUS_EXPR && is_positive > 0)
13746 || (code0 == MINUS_EXPR && is_positive < 0)))
13748 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13749 fold_overflow_warning (("assuming signed overflow does "
13750 "not occur when assuming that "
13751 "(X + c) <= X is always false"),
13752 WARN_STRICT_OVERFLOW_ALL);
13753 return constant_boolean_node (0, type);
13756 if (code == GE_EXPR
13757 && ((code0 == MINUS_EXPR && is_positive > 0)
13758 || (code0 == PLUS_EXPR && is_positive < 0)))
13760 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13761 fold_overflow_warning (("assuming signed overflow does "
13762 "not occur when assuming that "
13763 "(X - c) >= X is always false"),
13764 WARN_STRICT_OVERFLOW_ALL);
13765 return constant_boolean_node (0, type);
13770 /* Comparisons with the highest or lowest possible integer of
13771 the specified precision will have known values. */
13773 tree arg1_type = TREE_TYPE (arg1);
13774 unsigned int width = TYPE_PRECISION (arg1_type);
13776 if (TREE_CODE (arg1) == INTEGER_CST
13777 && width <= HOST_BITS_PER_DOUBLE_INT
13778 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13780 HOST_WIDE_INT signed_max_hi;
13781 unsigned HOST_WIDE_INT signed_max_lo;
13782 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13784 if (width <= HOST_BITS_PER_WIDE_INT)
13786 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13787 - 1;
13788 signed_max_hi = 0;
13789 max_hi = 0;
13791 if (TYPE_UNSIGNED (arg1_type))
13793 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13794 min_lo = 0;
13795 min_hi = 0;
13797 else
13799 max_lo = signed_max_lo;
13800 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13801 min_hi = -1;
13804 else
13806 width -= HOST_BITS_PER_WIDE_INT;
13807 signed_max_lo = -1;
13808 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13809 - 1;
13810 max_lo = -1;
13811 min_lo = 0;
13813 if (TYPE_UNSIGNED (arg1_type))
13815 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13816 min_hi = 0;
13818 else
13820 max_hi = signed_max_hi;
13821 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13825 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13826 && TREE_INT_CST_LOW (arg1) == max_lo)
13827 switch (code)
13829 case GT_EXPR:
13830 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13832 case GE_EXPR:
13833 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13835 case LE_EXPR:
13836 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13838 case LT_EXPR:
13839 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13841 /* The GE_EXPR and LT_EXPR cases above are not normally
13842 reached because of previous transformations. */
13844 default:
13845 break;
13847 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13848 == max_hi
13849 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13850 switch (code)
13852 case GT_EXPR:
13853 arg1 = const_binop (PLUS_EXPR, arg1,
13854 build_int_cst (TREE_TYPE (arg1), 1));
13855 return fold_build2_loc (loc, EQ_EXPR, type,
13856 fold_convert_loc (loc,
13857 TREE_TYPE (arg1), arg0),
13858 arg1);
13859 case LE_EXPR:
13860 arg1 = const_binop (PLUS_EXPR, arg1,
13861 build_int_cst (TREE_TYPE (arg1), 1));
13862 return fold_build2_loc (loc, NE_EXPR, type,
13863 fold_convert_loc (loc, TREE_TYPE (arg1),
13864 arg0),
13865 arg1);
13866 default:
13867 break;
13869 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13870 == min_hi
13871 && TREE_INT_CST_LOW (arg1) == min_lo)
13872 switch (code)
13874 case LT_EXPR:
13875 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13877 case LE_EXPR:
13878 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13880 case GE_EXPR:
13881 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13883 case GT_EXPR:
13884 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13886 default:
13887 break;
13889 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13890 == min_hi
13891 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13892 switch (code)
13894 case GE_EXPR:
13895 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13896 return fold_build2_loc (loc, NE_EXPR, type,
13897 fold_convert_loc (loc,
13898 TREE_TYPE (arg1), arg0),
13899 arg1);
13900 case LT_EXPR:
13901 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13902 return fold_build2_loc (loc, EQ_EXPR, type,
13903 fold_convert_loc (loc, TREE_TYPE (arg1),
13904 arg0),
13905 arg1);
13906 default:
13907 break;
13910 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13911 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13912 && TYPE_UNSIGNED (arg1_type)
13913 /* We will flip the signedness of the comparison operator
13914 associated with the mode of arg1, so the sign bit is
13915 specified by this mode. Check that arg1 is the signed
13916 max associated with this sign bit. */
13917 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13918 /* signed_type does not work on pointer types. */
13919 && INTEGRAL_TYPE_P (arg1_type))
13921 /* The following case also applies to X < signed_max+1
13922 and X >= signed_max+1 because previous transformations. */
13923 if (code == LE_EXPR || code == GT_EXPR)
13925 tree st = signed_type_for (arg1_type);
13926 return fold_build2_loc (loc,
13927 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13928 type, fold_convert_loc (loc, st, arg0),
13929 build_int_cst (st, 0));
13935 /* If we are comparing an ABS_EXPR with a constant, we can
13936 convert all the cases into explicit comparisons, but they may
13937 well not be faster than doing the ABS and one comparison.
13938 But ABS (X) <= C is a range comparison, which becomes a subtraction
13939 and a comparison, and is probably faster. */
13940 if (code == LE_EXPR
13941 && TREE_CODE (arg1) == INTEGER_CST
13942 && TREE_CODE (arg0) == ABS_EXPR
13943 && ! TREE_SIDE_EFFECTS (arg0)
13944 && (0 != (tem = negate_expr (arg1)))
13945 && TREE_CODE (tem) == INTEGER_CST
13946 && !TREE_OVERFLOW (tem))
13947 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13948 build2 (GE_EXPR, type,
13949 TREE_OPERAND (arg0, 0), tem),
13950 build2 (LE_EXPR, type,
13951 TREE_OPERAND (arg0, 0), arg1));
13953 /* Convert ABS_EXPR<x> >= 0 to true. */
13954 strict_overflow_p = false;
13955 if (code == GE_EXPR
13956 && (integer_zerop (arg1)
13957 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13958 && real_zerop (arg1)))
13959 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13961 if (strict_overflow_p)
13962 fold_overflow_warning (("assuming signed overflow does not occur "
13963 "when simplifying comparison of "
13964 "absolute value and zero"),
13965 WARN_STRICT_OVERFLOW_CONDITIONAL);
13966 return omit_one_operand_loc (loc, type,
13967 constant_boolean_node (true, type),
13968 arg0);
13971 /* Convert ABS_EXPR<x> < 0 to false. */
13972 strict_overflow_p = false;
13973 if (code == LT_EXPR
13974 && (integer_zerop (arg1) || real_zerop (arg1))
13975 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13977 if (strict_overflow_p)
13978 fold_overflow_warning (("assuming signed overflow does not occur "
13979 "when simplifying comparison of "
13980 "absolute value and zero"),
13981 WARN_STRICT_OVERFLOW_CONDITIONAL);
13982 return omit_one_operand_loc (loc, type,
13983 constant_boolean_node (false, type),
13984 arg0);
13987 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13988 and similarly for >= into !=. */
13989 if ((code == LT_EXPR || code == GE_EXPR)
13990 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13991 && TREE_CODE (arg1) == LSHIFT_EXPR
13992 && integer_onep (TREE_OPERAND (arg1, 0)))
13993 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13994 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13995 TREE_OPERAND (arg1, 1)),
13996 build_zero_cst (TREE_TYPE (arg0)));
13998 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13999 otherwise Y might be >= # of bits in X's type and thus e.g.
14000 (unsigned char) (1 << Y) for Y 15 might be 0.
14001 If the cast is widening, then 1 << Y should have unsigned type,
14002 otherwise if Y is number of bits in the signed shift type minus 1,
14003 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
14004 31 might be 0xffffffff80000000. */
14005 if ((code == LT_EXPR || code == GE_EXPR)
14006 && TYPE_UNSIGNED (TREE_TYPE (arg0))
14007 && CONVERT_EXPR_P (arg1)
14008 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
14009 && (TYPE_PRECISION (TREE_TYPE (arg1))
14010 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
14011 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
14012 || (TYPE_PRECISION (TREE_TYPE (arg1))
14013 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
14014 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
14016 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
14017 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
14018 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
14019 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
14020 build_zero_cst (TREE_TYPE (arg0)));
14023 return NULL_TREE;
14025 case UNORDERED_EXPR:
14026 case ORDERED_EXPR:
14027 case UNLT_EXPR:
14028 case UNLE_EXPR:
14029 case UNGT_EXPR:
14030 case UNGE_EXPR:
14031 case UNEQ_EXPR:
14032 case LTGT_EXPR:
14033 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
14035 t1 = fold_relational_const (code, type, arg0, arg1);
14036 if (t1 != NULL_TREE)
14037 return t1;
14040 /* If the first operand is NaN, the result is constant. */
14041 if (TREE_CODE (arg0) == REAL_CST
14042 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
14043 && (code != LTGT_EXPR || ! flag_trapping_math))
14045 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14046 ? integer_zero_node
14047 : integer_one_node;
14048 return omit_one_operand_loc (loc, type, t1, arg1);
14051 /* If the second operand is NaN, the result is constant. */
14052 if (TREE_CODE (arg1) == REAL_CST
14053 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
14054 && (code != LTGT_EXPR || ! flag_trapping_math))
14056 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14057 ? integer_zero_node
14058 : integer_one_node;
14059 return omit_one_operand_loc (loc, type, t1, arg0);
14062 /* Simplify unordered comparison of something with itself. */
14063 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
14064 && operand_equal_p (arg0, arg1, 0))
14065 return constant_boolean_node (1, type);
14067 if (code == LTGT_EXPR
14068 && !flag_trapping_math
14069 && operand_equal_p (arg0, arg1, 0))
14070 return constant_boolean_node (0, type);
14072 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
14074 tree targ0 = strip_float_extensions (arg0);
14075 tree targ1 = strip_float_extensions (arg1);
14076 tree newtype = TREE_TYPE (targ0);
14078 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14079 newtype = TREE_TYPE (targ1);
14081 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14082 return fold_build2_loc (loc, code, type,
14083 fold_convert_loc (loc, newtype, targ0),
14084 fold_convert_loc (loc, newtype, targ1));
14087 return NULL_TREE;
14089 case COMPOUND_EXPR:
14090 /* When pedantic, a compound expression can be neither an lvalue
14091 nor an integer constant expression. */
14092 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14093 return NULL_TREE;
14094 /* Don't let (0, 0) be null pointer constant. */
14095 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14096 : fold_convert_loc (loc, type, arg1);
14097 return pedantic_non_lvalue_loc (loc, tem);
14099 case COMPLEX_EXPR:
14100 if ((TREE_CODE (arg0) == REAL_CST
14101 && TREE_CODE (arg1) == REAL_CST)
14102 || (TREE_CODE (arg0) == INTEGER_CST
14103 && TREE_CODE (arg1) == INTEGER_CST))
14104 return build_complex (type, arg0, arg1);
14105 if (TREE_CODE (arg0) == REALPART_EXPR
14106 && TREE_CODE (arg1) == IMAGPART_EXPR
14107 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14108 && operand_equal_p (TREE_OPERAND (arg0, 0),
14109 TREE_OPERAND (arg1, 0), 0))
14110 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14111 TREE_OPERAND (arg1, 0));
14112 return NULL_TREE;
14114 case ASSERT_EXPR:
14115 /* An ASSERT_EXPR should never be passed to fold_binary. */
14116 gcc_unreachable ();
14118 case VEC_PACK_TRUNC_EXPR:
14119 case VEC_PACK_FIX_TRUNC_EXPR:
14121 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14122 tree *elts;
14124 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14125 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14126 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14127 return NULL_TREE;
14129 elts = XALLOCAVEC (tree, nelts);
14130 if (!vec_cst_ctor_to_array (arg0, elts)
14131 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14132 return NULL_TREE;
14134 for (i = 0; i < nelts; i++)
14136 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14137 ? NOP_EXPR : FIX_TRUNC_EXPR,
14138 TREE_TYPE (type), elts[i]);
14139 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14140 return NULL_TREE;
14143 return build_vector (type, elts);
14146 case VEC_WIDEN_MULT_LO_EXPR:
14147 case VEC_WIDEN_MULT_HI_EXPR:
14148 case VEC_WIDEN_MULT_EVEN_EXPR:
14149 case VEC_WIDEN_MULT_ODD_EXPR:
14151 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14152 unsigned int out, ofs, scale;
14153 tree *elts;
14155 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14156 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14157 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14158 return NULL_TREE;
14160 elts = XALLOCAVEC (tree, nelts * 4);
14161 if (!vec_cst_ctor_to_array (arg0, elts)
14162 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14163 return NULL_TREE;
14165 if (code == VEC_WIDEN_MULT_LO_EXPR)
14166 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14167 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14168 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14169 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14170 scale = 1, ofs = 0;
14171 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14172 scale = 1, ofs = 1;
14174 for (out = 0; out < nelts; out++)
14176 unsigned int in1 = (out << scale) + ofs;
14177 unsigned int in2 = in1 + nelts * 2;
14178 tree t1, t2;
14180 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14181 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14183 if (t1 == NULL_TREE || t2 == NULL_TREE)
14184 return NULL_TREE;
14185 elts[out] = const_binop (MULT_EXPR, t1, t2);
14186 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14187 return NULL_TREE;
14190 return build_vector (type, elts);
14193 default:
14194 return NULL_TREE;
14195 } /* switch (code) */
14198 /* Fold a binary expression of code CODE and type TYPE with operands
14199 OP0 and OP1. Return the folded expression if folding is
14200 successful. Otherwise, return NULL_TREE.
14201 This is a wrapper around fold_binary_1 function (which does the
14202 actual folding). Set the EXPR_FOLDED flag of the folded expression
14203 if folding is successful. */
14204 tree
14205 fold_binary_loc (location_t loc,
14206 enum tree_code code, tree type, tree op0, tree op1)
14208 tree tem = fold_binary_loc_1 (loc, code, type, op0, op1);
14209 if (tem)
14210 set_expr_folded_flag (tem);
14211 return tem;
14214 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14215 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14216 of GOTO_EXPR. */
14218 static tree
14219 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14221 switch (TREE_CODE (*tp))
14223 case LABEL_EXPR:
14224 return *tp;
14226 case GOTO_EXPR:
14227 *walk_subtrees = 0;
14229 /* ... fall through ... */
14231 default:
14232 return NULL_TREE;
14236 /* Return whether the sub-tree ST contains a label which is accessible from
14237 outside the sub-tree. */
14239 static bool
14240 contains_label_p (tree st)
14242 return
14243 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
14246 /* Fold a ternary expression of code CODE and type TYPE with operands
14247 OP0, OP1, and OP2. Return the folded expression if folding is
14248 successful. Otherwise, return NULL_TREE. */
14250 static tree
14251 fold_ternary_loc_1 (location_t loc, enum tree_code code, tree type,
14252 tree op0, tree op1, tree op2)
14254 tree tem;
14255 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14256 enum tree_code_class kind = TREE_CODE_CLASS (code);
14258 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14259 && TREE_CODE_LENGTH (code) == 3);
14261 /* Strip any conversions that don't change the mode. This is safe
14262 for every expression, except for a comparison expression because
14263 its signedness is derived from its operands. So, in the latter
14264 case, only strip conversions that don't change the signedness.
14266 Note that this is done as an internal manipulation within the
14267 constant folder, in order to find the simplest representation of
14268 the arguments so that their form can be studied. In any cases,
14269 the appropriate type conversions should be put back in the tree
14270 that will get out of the constant folder. */
14271 if (op0)
14273 arg0 = op0;
14274 STRIP_NOPS (arg0);
14277 if (op1)
14279 arg1 = op1;
14280 STRIP_NOPS (arg1);
14283 if (op2)
14285 arg2 = op2;
14286 STRIP_NOPS (arg2);
14289 switch (code)
14291 case COMPONENT_REF:
14292 if (TREE_CODE (arg0) == CONSTRUCTOR
14293 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14295 unsigned HOST_WIDE_INT idx;
14296 tree field, value;
14297 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14298 if (field == arg1)
14299 return value;
14301 return NULL_TREE;
14303 case COND_EXPR:
14304 case VEC_COND_EXPR:
14305 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14306 so all simple results must be passed through pedantic_non_lvalue. */
14307 if (TREE_CODE (arg0) == INTEGER_CST)
14309 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14310 tem = integer_zerop (arg0) ? op2 : op1;
14311 /* Only optimize constant conditions when the selected branch
14312 has the same type as the COND_EXPR. This avoids optimizing
14313 away "c ? x : throw", where the throw has a void type.
14314 Avoid throwing away that operand which contains label. */
14315 if ((!TREE_SIDE_EFFECTS (unused_op)
14316 || !contains_label_p (unused_op))
14317 && (! VOID_TYPE_P (TREE_TYPE (tem))
14318 || VOID_TYPE_P (type)))
14319 return pedantic_non_lvalue_loc (loc, tem);
14320 return NULL_TREE;
14322 else if (TREE_CODE (arg0) == VECTOR_CST)
14324 if (integer_all_onesp (arg0))
14325 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14326 if (integer_zerop (arg0))
14327 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14329 if ((TREE_CODE (arg1) == VECTOR_CST
14330 || TREE_CODE (arg1) == CONSTRUCTOR)
14331 && (TREE_CODE (arg2) == VECTOR_CST
14332 || TREE_CODE (arg2) == CONSTRUCTOR))
14334 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14335 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14336 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14337 for (i = 0; i < nelts; i++)
14339 tree val = VECTOR_CST_ELT (arg0, i);
14340 if (integer_all_onesp (val))
14341 sel[i] = i;
14342 else if (integer_zerop (val))
14343 sel[i] = nelts + i;
14344 else /* Currently unreachable. */
14345 return NULL_TREE;
14347 tree t = fold_vec_perm (type, arg1, arg2, sel);
14348 if (t != NULL_TREE)
14349 return t;
14353 if (operand_equal_p (arg1, op2, 0))
14354 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14356 /* If we have A op B ? A : C, we may be able to convert this to a
14357 simpler expression, depending on the operation and the values
14358 of B and C. Signed zeros prevent all of these transformations,
14359 for reasons given above each one.
14361 Also try swapping the arguments and inverting the conditional. */
14362 if (COMPARISON_CLASS_P (arg0)
14363 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14364 arg1, TREE_OPERAND (arg0, 1))
14365 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14367 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14368 if (tem)
14369 return tem;
14372 if (COMPARISON_CLASS_P (arg0)
14373 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14374 op2,
14375 TREE_OPERAND (arg0, 1))
14376 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14378 location_t loc0 = expr_location_or (arg0, loc);
14379 tem = fold_invert_truthvalue (loc0, arg0);
14380 if (tem && COMPARISON_CLASS_P (tem))
14382 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14383 if (tem)
14384 return tem;
14388 /* If the second operand is simpler than the third, swap them
14389 since that produces better jump optimization results. */
14390 if (truth_value_p (TREE_CODE (arg0))
14391 && tree_swap_operands_p (op1, op2, false))
14393 location_t loc0 = expr_location_or (arg0, loc);
14394 /* See if this can be inverted. If it can't, possibly because
14395 it was a floating-point inequality comparison, don't do
14396 anything. */
14397 tem = fold_invert_truthvalue (loc0, arg0);
14398 if (tem)
14399 return fold_build3_loc (loc, code, type, tem, op2, op1);
14402 /* Convert A ? 1 : 0 to simply A. */
14403 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14404 : (integer_onep (op1)
14405 && !VECTOR_TYPE_P (type)))
14406 && integer_zerop (op2)
14407 /* If we try to convert OP0 to our type, the
14408 call to fold will try to move the conversion inside
14409 a COND, which will recurse. In that case, the COND_EXPR
14410 is probably the best choice, so leave it alone. */
14411 && type == TREE_TYPE (arg0))
14412 return pedantic_non_lvalue_loc (loc, arg0);
14414 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14415 over COND_EXPR in cases such as floating point comparisons. */
14416 if (integer_zerop (op1)
14417 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14418 : (integer_onep (op2)
14419 && !VECTOR_TYPE_P (type)))
14420 && truth_value_p (TREE_CODE (arg0)))
14421 return pedantic_non_lvalue_loc (loc,
14422 fold_convert_loc (loc, type,
14423 invert_truthvalue_loc (loc,
14424 arg0)));
14426 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14427 if (TREE_CODE (arg0) == LT_EXPR
14428 && integer_zerop (TREE_OPERAND (arg0, 1))
14429 && integer_zerop (op2)
14430 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14432 /* sign_bit_p looks through both zero and sign extensions,
14433 but for this optimization only sign extensions are
14434 usable. */
14435 tree tem2 = TREE_OPERAND (arg0, 0);
14436 while (tem != tem2)
14438 if (TREE_CODE (tem2) != NOP_EXPR
14439 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14441 tem = NULL_TREE;
14442 break;
14444 tem2 = TREE_OPERAND (tem2, 0);
14446 /* sign_bit_p only checks ARG1 bits within A's precision.
14447 If <sign bit of A> has wider type than A, bits outside
14448 of A's precision in <sign bit of A> need to be checked.
14449 If they are all 0, this optimization needs to be done
14450 in unsigned A's type, if they are all 1 in signed A's type,
14451 otherwise this can't be done. */
14452 if (tem
14453 && TYPE_PRECISION (TREE_TYPE (tem))
14454 < TYPE_PRECISION (TREE_TYPE (arg1))
14455 && TYPE_PRECISION (TREE_TYPE (tem))
14456 < TYPE_PRECISION (type))
14458 unsigned HOST_WIDE_INT mask_lo;
14459 HOST_WIDE_INT mask_hi;
14460 int inner_width, outer_width;
14461 tree tem_type;
14463 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14464 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14465 if (outer_width > TYPE_PRECISION (type))
14466 outer_width = TYPE_PRECISION (type);
14468 if (outer_width > HOST_BITS_PER_WIDE_INT)
14470 mask_hi = (HOST_WIDE_INT_M1U
14471 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14472 mask_lo = -1;
14474 else
14476 mask_hi = 0;
14477 mask_lo = (HOST_WIDE_INT_M1U
14478 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14480 if (inner_width > HOST_BITS_PER_WIDE_INT)
14482 mask_hi &= ~(HOST_WIDE_INT_M1U
14483 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14484 mask_lo = 0;
14486 else
14487 mask_lo &= ~(HOST_WIDE_INT_M1U
14488 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14490 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14491 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14493 tem_type = signed_type_for (TREE_TYPE (tem));
14494 tem = fold_convert_loc (loc, tem_type, tem);
14496 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14497 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14499 tem_type = unsigned_type_for (TREE_TYPE (tem));
14500 tem = fold_convert_loc (loc, tem_type, tem);
14502 else
14503 tem = NULL;
14506 if (tem)
14507 return
14508 fold_convert_loc (loc, type,
14509 fold_build2_loc (loc, BIT_AND_EXPR,
14510 TREE_TYPE (tem), tem,
14511 fold_convert_loc (loc,
14512 TREE_TYPE (tem),
14513 arg1)));
14516 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14517 already handled above. */
14518 if (TREE_CODE (arg0) == BIT_AND_EXPR
14519 && integer_onep (TREE_OPERAND (arg0, 1))
14520 && integer_zerop (op2)
14521 && integer_pow2p (arg1))
14523 tree tem = TREE_OPERAND (arg0, 0);
14524 STRIP_NOPS (tem);
14525 if (TREE_CODE (tem) == RSHIFT_EXPR
14526 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14527 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14528 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14529 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14530 TREE_OPERAND (tem, 0), arg1);
14533 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14534 is probably obsolete because the first operand should be a
14535 truth value (that's why we have the two cases above), but let's
14536 leave it in until we can confirm this for all front-ends. */
14537 if (integer_zerop (op2)
14538 && TREE_CODE (arg0) == NE_EXPR
14539 && integer_zerop (TREE_OPERAND (arg0, 1))
14540 && integer_pow2p (arg1)
14541 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14542 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14543 arg1, OEP_ONLY_CONST))
14544 return pedantic_non_lvalue_loc (loc,
14545 fold_convert_loc (loc, type,
14546 TREE_OPERAND (arg0, 0)));
14548 /* Disable the transformations below for vectors, since
14549 fold_binary_op_with_conditional_arg may undo them immediately,
14550 yielding an infinite loop. */
14551 if (code == VEC_COND_EXPR)
14552 return NULL_TREE;
14554 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14555 if (integer_zerop (op2)
14556 && truth_value_p (TREE_CODE (arg0))
14557 && truth_value_p (TREE_CODE (arg1))
14558 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14559 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14560 : TRUTH_ANDIF_EXPR,
14561 type, fold_convert_loc (loc, type, arg0), arg1);
14563 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14564 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14565 && truth_value_p (TREE_CODE (arg0))
14566 && truth_value_p (TREE_CODE (arg1))
14567 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14569 location_t loc0 = expr_location_or (arg0, loc);
14570 /* Only perform transformation if ARG0 is easily inverted. */
14571 tem = fold_invert_truthvalue (loc0, arg0);
14572 if (tem)
14573 return fold_build2_loc (loc, code == VEC_COND_EXPR
14574 ? BIT_IOR_EXPR
14575 : TRUTH_ORIF_EXPR,
14576 type, fold_convert_loc (loc, type, tem),
14577 arg1);
14580 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14581 if (integer_zerop (arg1)
14582 && truth_value_p (TREE_CODE (arg0))
14583 && truth_value_p (TREE_CODE (op2))
14584 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14586 location_t loc0 = expr_location_or (arg0, loc);
14587 /* Only perform transformation if ARG0 is easily inverted. */
14588 tem = fold_invert_truthvalue (loc0, arg0);
14589 if (tem)
14590 return fold_build2_loc (loc, code == VEC_COND_EXPR
14591 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14592 type, fold_convert_loc (loc, type, tem),
14593 op2);
14596 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14597 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14598 && truth_value_p (TREE_CODE (arg0))
14599 && truth_value_p (TREE_CODE (op2))
14600 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14601 return fold_build2_loc (loc, code == VEC_COND_EXPR
14602 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14603 type, fold_convert_loc (loc, type, arg0), op2);
14605 return NULL_TREE;
14607 case CALL_EXPR:
14608 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14609 of fold_ternary on them. */
14610 gcc_unreachable ();
14612 case BIT_FIELD_REF:
14613 if ((TREE_CODE (arg0) == VECTOR_CST
14614 || (TREE_CODE (arg0) == CONSTRUCTOR
14615 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14616 && (type == TREE_TYPE (TREE_TYPE (arg0))
14617 || (TREE_CODE (type) == VECTOR_TYPE
14618 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14620 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14621 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14622 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14623 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14625 if (n != 0
14626 && (idx % width) == 0
14627 && (n % width) == 0
14628 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14630 idx = idx / width;
14631 n = n / width;
14633 if (TREE_CODE (arg0) == VECTOR_CST)
14635 if (n == 1)
14636 return VECTOR_CST_ELT (arg0, idx);
14638 tree *vals = XALLOCAVEC (tree, n);
14639 for (unsigned i = 0; i < n; ++i)
14640 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14641 return build_vector (type, vals);
14644 /* Constructor elements can be subvectors. */
14645 unsigned HOST_WIDE_INT k = 1;
14646 if (CONSTRUCTOR_NELTS (arg0) != 0)
14648 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14649 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14650 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14653 /* We keep an exact subset of the constructor elements. */
14654 if ((idx % k) == 0 && (n % k) == 0)
14656 if (CONSTRUCTOR_NELTS (arg0) == 0)
14657 return build_constructor (type, NULL);
14658 idx /= k;
14659 n /= k;
14660 if (n == 1)
14662 if (idx < CONSTRUCTOR_NELTS (arg0))
14663 return CONSTRUCTOR_ELT (arg0, idx)->value;
14664 return build_zero_cst (type);
14667 vec<constructor_elt, va_gc> *vals;
14668 vec_alloc (vals, n);
14669 for (unsigned i = 0;
14670 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14671 ++i)
14672 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14673 CONSTRUCTOR_ELT
14674 (arg0, idx + i)->value);
14675 return build_constructor (type, vals);
14677 /* The bitfield references a single constructor element. */
14678 else if (idx + n <= (idx / k + 1) * k)
14680 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14681 return build_zero_cst (type);
14682 else if (n == k)
14683 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14684 else
14685 return fold_build3_loc (loc, code, type,
14686 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14687 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14692 /* A bit-field-ref that referenced the full argument can be stripped. */
14693 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14694 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14695 && integer_zerop (op2))
14696 return fold_convert_loc (loc, type, arg0);
14698 /* On constants we can use native encode/interpret to constant
14699 fold (nearly) all BIT_FIELD_REFs. */
14700 if (CONSTANT_CLASS_P (arg0)
14701 && can_native_interpret_type_p (type)
14702 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14703 /* This limitation should not be necessary, we just need to
14704 round this up to mode size. */
14705 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14706 /* Need bit-shifting of the buffer to relax the following. */
14707 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14709 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14710 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14711 unsigned HOST_WIDE_INT clen;
14712 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14713 /* ??? We cannot tell native_encode_expr to start at
14714 some random byte only. So limit us to a reasonable amount
14715 of work. */
14716 if (clen <= 4096)
14718 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14719 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14720 if (len > 0
14721 && len * BITS_PER_UNIT >= bitpos + bitsize)
14723 tree v = native_interpret_expr (type,
14724 b + bitpos / BITS_PER_UNIT,
14725 bitsize / BITS_PER_UNIT);
14726 if (v)
14727 return v;
14732 return NULL_TREE;
14734 case FMA_EXPR:
14735 /* For integers we can decompose the FMA if possible. */
14736 if (TREE_CODE (arg0) == INTEGER_CST
14737 && TREE_CODE (arg1) == INTEGER_CST)
14738 return fold_build2_loc (loc, PLUS_EXPR, type,
14739 const_binop (MULT_EXPR, arg0, arg1), arg2);
14740 if (integer_zerop (arg2))
14741 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14743 return fold_fma (loc, type, arg0, arg1, arg2);
14745 case VEC_PERM_EXPR:
14746 if (TREE_CODE (arg2) == VECTOR_CST)
14748 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14749 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14750 tree t;
14751 bool need_mask_canon = false;
14752 bool all_in_vec0 = true;
14753 bool all_in_vec1 = true;
14754 bool maybe_identity = true;
14755 bool single_arg = (op0 == op1);
14756 bool changed = false;
14758 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14759 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14760 for (i = 0; i < nelts; i++)
14762 tree val = VECTOR_CST_ELT (arg2, i);
14763 if (TREE_CODE (val) != INTEGER_CST)
14764 return NULL_TREE;
14766 sel[i] = TREE_INT_CST_LOW (val) & mask;
14767 if (TREE_INT_CST_HIGH (val)
14768 || ((unsigned HOST_WIDE_INT)
14769 TREE_INT_CST_LOW (val) != sel[i]))
14770 need_mask_canon = true;
14772 if (sel[i] < nelts)
14773 all_in_vec1 = false;
14774 else
14775 all_in_vec0 = false;
14777 if ((sel[i] & (nelts-1)) != i)
14778 maybe_identity = false;
14781 if (maybe_identity)
14783 if (all_in_vec0)
14784 return op0;
14785 if (all_in_vec1)
14786 return op1;
14789 if (all_in_vec0)
14790 op1 = op0;
14791 else if (all_in_vec1)
14793 op0 = op1;
14794 for (i = 0; i < nelts; i++)
14795 sel[i] -= nelts;
14796 need_mask_canon = true;
14799 if ((TREE_CODE (op0) == VECTOR_CST
14800 || TREE_CODE (op0) == CONSTRUCTOR)
14801 && (TREE_CODE (op1) == VECTOR_CST
14802 || TREE_CODE (op1) == CONSTRUCTOR))
14804 t = fold_vec_perm (type, op0, op1, sel);
14805 if (t != NULL_TREE)
14806 return t;
14809 if (op0 == op1 && !single_arg)
14810 changed = true;
14812 if (need_mask_canon && arg2 == op2)
14814 tree *tsel = XALLOCAVEC (tree, nelts);
14815 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14816 for (i = 0; i < nelts; i++)
14817 tsel[i] = build_int_cst (eltype, sel[i]);
14818 op2 = build_vector (TREE_TYPE (arg2), tsel);
14819 changed = true;
14822 if (changed)
14823 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14825 return NULL_TREE;
14827 default:
14828 return NULL_TREE;
14829 } /* switch (code) */
14832 /* Fold a ternary expression of code CODE and type TYPE with operands
14833 OP0, OP1, and OP2. Return the folded expression if folding is
14834 successful. Otherwise, return NULL_TREE.
14835 This is a wrapper around fold_ternary_1 function (which does the
14836 actual folding). Set the EXPR_FOLDED flag of the folded expression
14837 if folding is successful. */
14839 tree
14840 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14841 tree op0, tree op1, tree op2)
14843 tree tem = fold_ternary_loc_1 (loc, code, type, op0, op1, op2);
14844 if (tem)
14845 set_expr_folded_flag (tem);
14846 return tem;
14849 /* Perform constant folding and related simplification of EXPR.
14850 The related simplifications include x*1 => x, x*0 => 0, etc.,
14851 and application of the associative law.
14852 NOP_EXPR conversions may be removed freely (as long as we
14853 are careful not to change the type of the overall expression).
14854 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14855 but we can constant-fold them if they have constant operands. */
14857 #ifdef ENABLE_FOLD_CHECKING
14858 # define fold(x) fold_1 (x)
14859 static tree fold_1 (tree);
14860 static
14861 #endif
14862 tree
14863 fold (tree expr)
14865 const tree t = expr;
14866 enum tree_code code = TREE_CODE (t);
14867 enum tree_code_class kind = TREE_CODE_CLASS (code);
14868 tree tem;
14869 location_t loc = EXPR_LOCATION (expr);
14871 /* Return right away if a constant. */
14872 if (kind == tcc_constant)
14873 return t;
14875 /* CALL_EXPR-like objects with variable numbers of operands are
14876 treated specially. */
14877 if (kind == tcc_vl_exp)
14879 if (code == CALL_EXPR)
14881 tem = fold_call_expr (loc, expr, false);
14882 return tem ? tem : expr;
14884 return expr;
14887 if (IS_EXPR_CODE_CLASS (kind))
14889 tree type = TREE_TYPE (t);
14890 tree op0, op1, op2;
14892 switch (TREE_CODE_LENGTH (code))
14894 case 1:
14895 op0 = TREE_OPERAND (t, 0);
14896 tem = fold_unary_loc (loc, code, type, op0);
14897 return tem ? tem : expr;
14898 case 2:
14899 op0 = TREE_OPERAND (t, 0);
14900 op1 = TREE_OPERAND (t, 1);
14901 tem = fold_binary_loc (loc, code, type, op0, op1);
14902 return tem ? tem : expr;
14903 case 3:
14904 op0 = TREE_OPERAND (t, 0);
14905 op1 = TREE_OPERAND (t, 1);
14906 op2 = TREE_OPERAND (t, 2);
14907 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14908 return tem ? tem : expr;
14909 default:
14910 break;
14914 switch (code)
14916 case ARRAY_REF:
14918 tree op0 = TREE_OPERAND (t, 0);
14919 tree op1 = TREE_OPERAND (t, 1);
14921 if (TREE_CODE (op1) == INTEGER_CST
14922 && TREE_CODE (op0) == CONSTRUCTOR
14923 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14925 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14926 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14927 unsigned HOST_WIDE_INT begin = 0;
14929 /* Find a matching index by means of a binary search. */
14930 while (begin != end)
14932 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14933 tree index = (*elts)[middle].index;
14935 if (TREE_CODE (index) == INTEGER_CST
14936 && tree_int_cst_lt (index, op1))
14937 begin = middle + 1;
14938 else if (TREE_CODE (index) == INTEGER_CST
14939 && tree_int_cst_lt (op1, index))
14940 end = middle;
14941 else if (TREE_CODE (index) == RANGE_EXPR
14942 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14943 begin = middle + 1;
14944 else if (TREE_CODE (index) == RANGE_EXPR
14945 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14946 end = middle;
14947 else
14948 return (*elts)[middle].value;
14952 return t;
14955 /* Return a VECTOR_CST if possible. */
14956 case CONSTRUCTOR:
14958 tree type = TREE_TYPE (t);
14959 if (TREE_CODE (type) != VECTOR_TYPE)
14960 return t;
14962 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14963 unsigned HOST_WIDE_INT idx, pos = 0;
14964 tree value;
14966 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14968 if (!CONSTANT_CLASS_P (value))
14969 return t;
14970 if (TREE_CODE (value) == VECTOR_CST)
14972 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14973 vec[pos++] = VECTOR_CST_ELT (value, i);
14975 else
14976 vec[pos++] = value;
14978 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14979 vec[pos] = build_zero_cst (TREE_TYPE (type));
14981 return build_vector (type, vec);
14984 case CONST_DECL:
14985 return fold (DECL_INITIAL (t));
14987 default:
14988 return t;
14989 } /* switch (code) */
14992 #ifdef ENABLE_FOLD_CHECKING
14993 #undef fold
14995 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14996 hash_table <pointer_hash <tree_node> >);
14997 static void fold_check_failed (const_tree, const_tree);
14998 void print_fold_checksum (const_tree);
15000 /* When --enable-checking=fold, compute a digest of expr before
15001 and after actual fold call to see if fold did not accidentally
15002 change original expr. */
15004 tree
15005 fold (tree expr)
15007 tree ret;
15008 struct md5_ctx ctx;
15009 unsigned char checksum_before[16], checksum_after[16];
15010 hash_table <pointer_hash <tree_node> > ht;
15012 ht.create (32);
15013 md5_init_ctx (&ctx);
15014 fold_checksum_tree (expr, &ctx, ht);
15015 md5_finish_ctx (&ctx, checksum_before);
15016 ht.empty ();
15018 ret = fold_1 (expr);
15020 md5_init_ctx (&ctx);
15021 fold_checksum_tree (expr, &ctx, ht);
15022 md5_finish_ctx (&ctx, checksum_after);
15023 ht.dispose ();
15025 if (memcmp (checksum_before, checksum_after, 16))
15026 fold_check_failed (expr, ret);
15028 return ret;
15031 void
15032 print_fold_checksum (const_tree expr)
15034 struct md5_ctx ctx;
15035 unsigned char checksum[16], cnt;
15036 hash_table <pointer_hash <tree_node> > ht;
15038 ht.create (32);
15039 md5_init_ctx (&ctx);
15040 fold_checksum_tree (expr, &ctx, ht);
15041 md5_finish_ctx (&ctx, checksum);
15042 ht.dispose ();
15043 for (cnt = 0; cnt < 16; ++cnt)
15044 fprintf (stderr, "%02x", checksum[cnt]);
15045 putc ('\n', stderr);
15048 static void
15049 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
15051 internal_error ("fold check: original tree changed by fold");
15054 static void
15055 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
15056 hash_table <pointer_hash <tree_node> > ht)
15058 tree_node **slot;
15059 enum tree_code code;
15060 union tree_node buf;
15061 int i, len;
15063 recursive_label:
15064 if (expr == NULL)
15065 return;
15066 slot = ht.find_slot (expr, INSERT);
15067 if (*slot != NULL)
15068 return;
15069 *slot = CONST_CAST_TREE (expr);
15070 code = TREE_CODE (expr);
15071 if (TREE_CODE_CLASS (code) == tcc_declaration
15072 && DECL_ASSEMBLER_NAME_SET_P (expr))
15074 /* Allow DECL_ASSEMBLER_NAME to be modified. */
15075 memcpy ((char *) &buf, expr, tree_size (expr));
15076 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
15077 expr = (tree) &buf;
15079 else if (TREE_CODE_CLASS (code) == tcc_type
15080 && (TYPE_POINTER_TO (expr)
15081 || TYPE_REFERENCE_TO (expr)
15082 || TYPE_CACHED_VALUES_P (expr)
15083 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
15084 || TYPE_NEXT_VARIANT (expr)))
15086 /* Allow these fields to be modified. */
15087 tree tmp;
15088 memcpy ((char *) &buf, expr, tree_size (expr));
15089 expr = tmp = (tree) &buf;
15090 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
15091 TYPE_POINTER_TO (tmp) = NULL;
15092 TYPE_REFERENCE_TO (tmp) = NULL;
15093 TYPE_NEXT_VARIANT (tmp) = NULL;
15094 if (TYPE_CACHED_VALUES_P (tmp))
15096 TYPE_CACHED_VALUES_P (tmp) = 0;
15097 TYPE_CACHED_VALUES (tmp) = NULL;
15100 md5_process_bytes (expr, tree_size (expr), ctx);
15101 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
15102 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
15103 if (TREE_CODE_CLASS (code) != tcc_type
15104 && TREE_CODE_CLASS (code) != tcc_declaration
15105 && code != TREE_LIST
15106 && code != SSA_NAME
15107 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
15108 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
15109 switch (TREE_CODE_CLASS (code))
15111 case tcc_constant:
15112 switch (code)
15114 case STRING_CST:
15115 md5_process_bytes (TREE_STRING_POINTER (expr),
15116 TREE_STRING_LENGTH (expr), ctx);
15117 break;
15118 case COMPLEX_CST:
15119 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
15120 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
15121 break;
15122 case VECTOR_CST:
15123 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15124 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15125 break;
15126 default:
15127 break;
15129 break;
15130 case tcc_exceptional:
15131 switch (code)
15133 case TREE_LIST:
15134 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15135 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15136 expr = TREE_CHAIN (expr);
15137 goto recursive_label;
15138 break;
15139 case TREE_VEC:
15140 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15141 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15142 break;
15143 default:
15144 break;
15146 break;
15147 case tcc_expression:
15148 case tcc_reference:
15149 case tcc_comparison:
15150 case tcc_unary:
15151 case tcc_binary:
15152 case tcc_statement:
15153 case tcc_vl_exp:
15154 len = TREE_OPERAND_LENGTH (expr);
15155 for (i = 0; i < len; ++i)
15156 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15157 break;
15158 case tcc_declaration:
15159 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15160 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15161 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15163 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15164 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15165 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15166 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15167 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15169 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15170 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15172 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15174 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15175 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15176 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15178 break;
15179 case tcc_type:
15180 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15181 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15182 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15183 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15184 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15185 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15186 if (INTEGRAL_TYPE_P (expr)
15187 || SCALAR_FLOAT_TYPE_P (expr))
15189 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15190 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15192 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15193 if (TREE_CODE (expr) == RECORD_TYPE
15194 || TREE_CODE (expr) == UNION_TYPE
15195 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15196 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15197 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15198 break;
15199 default:
15200 break;
15204 /* Helper function for outputting the checksum of a tree T. When
15205 debugging with gdb, you can "define mynext" to be "next" followed
15206 by "call debug_fold_checksum (op0)", then just trace down till the
15207 outputs differ. */
15209 DEBUG_FUNCTION void
15210 debug_fold_checksum (const_tree t)
15212 int i;
15213 unsigned char checksum[16];
15214 struct md5_ctx ctx;
15215 hash_table <pointer_hash <tree_node> > ht;
15216 ht.create (32);
15218 md5_init_ctx (&ctx);
15219 fold_checksum_tree (t, &ctx, ht);
15220 md5_finish_ctx (&ctx, checksum);
15221 ht.empty ();
15223 for (i = 0; i < 16; i++)
15224 fprintf (stderr, "%d ", checksum[i]);
15226 fprintf (stderr, "\n");
15229 #endif
15231 /* Fold a unary tree expression with code CODE of type TYPE with an
15232 operand OP0. LOC is the location of the resulting expression.
15233 Return a folded expression if successful. Otherwise, return a tree
15234 expression with code CODE of type TYPE with an operand OP0. */
15236 tree
15237 fold_build1_stat_loc (location_t loc,
15238 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15240 tree tem;
15241 #ifdef ENABLE_FOLD_CHECKING
15242 unsigned char checksum_before[16], checksum_after[16];
15243 struct md5_ctx ctx;
15244 hash_table <pointer_hash <tree_node> > ht;
15246 ht.create (32);
15247 md5_init_ctx (&ctx);
15248 fold_checksum_tree (op0, &ctx, ht);
15249 md5_finish_ctx (&ctx, checksum_before);
15250 ht.empty ();
15251 #endif
15253 tem = fold_unary_loc (loc, code, type, op0);
15254 if (!tem)
15255 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15257 #ifdef ENABLE_FOLD_CHECKING
15258 md5_init_ctx (&ctx);
15259 fold_checksum_tree (op0, &ctx, ht);
15260 md5_finish_ctx (&ctx, checksum_after);
15261 ht.dispose ();
15263 if (memcmp (checksum_before, checksum_after, 16))
15264 fold_check_failed (op0, tem);
15265 #endif
15266 return tem;
15269 /* Fold a binary tree expression with code CODE of type TYPE with
15270 operands OP0 and OP1. LOC is the location of the resulting
15271 expression. Return a folded expression if successful. Otherwise,
15272 return a tree expression with code CODE of type TYPE with operands
15273 OP0 and OP1. */
15275 tree
15276 fold_build2_stat_loc (location_t loc,
15277 enum tree_code code, tree type, tree op0, tree op1
15278 MEM_STAT_DECL)
15280 tree tem;
15281 #ifdef ENABLE_FOLD_CHECKING
15282 unsigned char checksum_before_op0[16],
15283 checksum_before_op1[16],
15284 checksum_after_op0[16],
15285 checksum_after_op1[16];
15286 struct md5_ctx ctx;
15287 hash_table <pointer_hash <tree_node> > ht;
15289 ht.create (32);
15290 md5_init_ctx (&ctx);
15291 fold_checksum_tree (op0, &ctx, ht);
15292 md5_finish_ctx (&ctx, checksum_before_op0);
15293 ht.empty ();
15295 md5_init_ctx (&ctx);
15296 fold_checksum_tree (op1, &ctx, ht);
15297 md5_finish_ctx (&ctx, checksum_before_op1);
15298 ht.empty ();
15299 #endif
15301 tem = fold_binary_loc (loc, code, type, op0, op1);
15302 if (!tem)
15303 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15305 #ifdef ENABLE_FOLD_CHECKING
15306 md5_init_ctx (&ctx);
15307 fold_checksum_tree (op0, &ctx, ht);
15308 md5_finish_ctx (&ctx, checksum_after_op0);
15309 ht.empty ();
15311 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15312 fold_check_failed (op0, tem);
15314 md5_init_ctx (&ctx);
15315 fold_checksum_tree (op1, &ctx, ht);
15316 md5_finish_ctx (&ctx, checksum_after_op1);
15317 ht.dispose ();
15319 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15320 fold_check_failed (op1, tem);
15321 #endif
15322 return tem;
15325 /* Fold a ternary tree expression with code CODE of type TYPE with
15326 operands OP0, OP1, and OP2. Return a folded expression if
15327 successful. Otherwise, return a tree expression with code CODE of
15328 type TYPE with operands OP0, OP1, and OP2. */
15330 tree
15331 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15332 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15334 tree tem;
15335 #ifdef ENABLE_FOLD_CHECKING
15336 unsigned char checksum_before_op0[16],
15337 checksum_before_op1[16],
15338 checksum_before_op2[16],
15339 checksum_after_op0[16],
15340 checksum_after_op1[16],
15341 checksum_after_op2[16];
15342 struct md5_ctx ctx;
15343 hash_table <pointer_hash <tree_node> > ht;
15345 ht.create (32);
15346 md5_init_ctx (&ctx);
15347 fold_checksum_tree (op0, &ctx, ht);
15348 md5_finish_ctx (&ctx, checksum_before_op0);
15349 ht.empty ();
15351 md5_init_ctx (&ctx);
15352 fold_checksum_tree (op1, &ctx, ht);
15353 md5_finish_ctx (&ctx, checksum_before_op1);
15354 ht.empty ();
15356 md5_init_ctx (&ctx);
15357 fold_checksum_tree (op2, &ctx, ht);
15358 md5_finish_ctx (&ctx, checksum_before_op2);
15359 ht.empty ();
15360 #endif
15362 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15363 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15364 if (!tem)
15365 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15367 #ifdef ENABLE_FOLD_CHECKING
15368 md5_init_ctx (&ctx);
15369 fold_checksum_tree (op0, &ctx, ht);
15370 md5_finish_ctx (&ctx, checksum_after_op0);
15371 ht.empty ();
15373 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15374 fold_check_failed (op0, tem);
15376 md5_init_ctx (&ctx);
15377 fold_checksum_tree (op1, &ctx, ht);
15378 md5_finish_ctx (&ctx, checksum_after_op1);
15379 ht.empty ();
15381 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15382 fold_check_failed (op1, tem);
15384 md5_init_ctx (&ctx);
15385 fold_checksum_tree (op2, &ctx, ht);
15386 md5_finish_ctx (&ctx, checksum_after_op2);
15387 ht.dispose ();
15389 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15390 fold_check_failed (op2, tem);
15391 #endif
15392 return tem;
15395 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15396 arguments in ARGARRAY, and a null static chain.
15397 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15398 of type TYPE from the given operands as constructed by build_call_array. */
15400 tree
15401 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15402 int nargs, tree *argarray)
15404 tree tem;
15405 #ifdef ENABLE_FOLD_CHECKING
15406 unsigned char checksum_before_fn[16],
15407 checksum_before_arglist[16],
15408 checksum_after_fn[16],
15409 checksum_after_arglist[16];
15410 struct md5_ctx ctx;
15411 hash_table <pointer_hash <tree_node> > ht;
15412 int i;
15414 ht.create (32);
15415 md5_init_ctx (&ctx);
15416 fold_checksum_tree (fn, &ctx, ht);
15417 md5_finish_ctx (&ctx, checksum_before_fn);
15418 ht.empty ();
15420 md5_init_ctx (&ctx);
15421 for (i = 0; i < nargs; i++)
15422 fold_checksum_tree (argarray[i], &ctx, ht);
15423 md5_finish_ctx (&ctx, checksum_before_arglist);
15424 ht.empty ();
15425 #endif
15427 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15429 #ifdef ENABLE_FOLD_CHECKING
15430 md5_init_ctx (&ctx);
15431 fold_checksum_tree (fn, &ctx, ht);
15432 md5_finish_ctx (&ctx, checksum_after_fn);
15433 ht.empty ();
15435 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15436 fold_check_failed (fn, tem);
15438 md5_init_ctx (&ctx);
15439 for (i = 0; i < nargs; i++)
15440 fold_checksum_tree (argarray[i], &ctx, ht);
15441 md5_finish_ctx (&ctx, checksum_after_arglist);
15442 ht.dispose ();
15444 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15445 fold_check_failed (NULL_TREE, tem);
15446 #endif
15447 return tem;
15450 /* Perform constant folding and related simplification of initializer
15451 expression EXPR. These behave identically to "fold_buildN" but ignore
15452 potential run-time traps and exceptions that fold must preserve. */
15454 #define START_FOLD_INIT \
15455 int saved_signaling_nans = flag_signaling_nans;\
15456 int saved_trapping_math = flag_trapping_math;\
15457 int saved_rounding_math = flag_rounding_math;\
15458 int saved_trapv = flag_trapv;\
15459 int saved_folding_initializer = folding_initializer;\
15460 flag_signaling_nans = 0;\
15461 flag_trapping_math = 0;\
15462 flag_rounding_math = 0;\
15463 flag_trapv = 0;\
15464 folding_initializer = 1;
15466 #define END_FOLD_INIT \
15467 flag_signaling_nans = saved_signaling_nans;\
15468 flag_trapping_math = saved_trapping_math;\
15469 flag_rounding_math = saved_rounding_math;\
15470 flag_trapv = saved_trapv;\
15471 folding_initializer = saved_folding_initializer;
15473 tree
15474 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15475 tree type, tree op)
15477 tree result;
15478 START_FOLD_INIT;
15480 result = fold_build1_loc (loc, code, type, op);
15482 END_FOLD_INIT;
15483 return result;
15486 tree
15487 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15488 tree type, tree op0, tree op1)
15490 tree result;
15491 START_FOLD_INIT;
15493 result = fold_build2_loc (loc, code, type, op0, op1);
15495 END_FOLD_INIT;
15496 return result;
15499 tree
15500 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15501 int nargs, tree *argarray)
15503 tree result;
15504 START_FOLD_INIT;
15506 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15508 END_FOLD_INIT;
15509 return result;
15512 #undef START_FOLD_INIT
15513 #undef END_FOLD_INIT
15515 /* Determine if first argument is a multiple of second argument. Return 0 if
15516 it is not, or we cannot easily determined it to be.
15518 An example of the sort of thing we care about (at this point; this routine
15519 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15520 fold cases do now) is discovering that
15522 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15524 is a multiple of
15526 SAVE_EXPR (J * 8)
15528 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15530 This code also handles discovering that
15532 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15534 is a multiple of 8 so we don't have to worry about dealing with a
15535 possible remainder.
15537 Note that we *look* inside a SAVE_EXPR only to determine how it was
15538 calculated; it is not safe for fold to do much of anything else with the
15539 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15540 at run time. For example, the latter example above *cannot* be implemented
15541 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15542 evaluation time of the original SAVE_EXPR is not necessarily the same at
15543 the time the new expression is evaluated. The only optimization of this
15544 sort that would be valid is changing
15546 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15548 divided by 8 to
15550 SAVE_EXPR (I) * SAVE_EXPR (J)
15552 (where the same SAVE_EXPR (J) is used in the original and the
15553 transformed version). */
15556 multiple_of_p (tree type, const_tree top, const_tree bottom)
15558 if (operand_equal_p (top, bottom, 0))
15559 return 1;
15561 if (TREE_CODE (type) != INTEGER_TYPE)
15562 return 0;
15564 switch (TREE_CODE (top))
15566 case BIT_AND_EXPR:
15567 /* Bitwise and provides a power of two multiple. If the mask is
15568 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15569 if (!integer_pow2p (bottom))
15570 return 0;
15571 /* FALLTHRU */
15573 case MULT_EXPR:
15574 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15575 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15577 case PLUS_EXPR:
15578 case MINUS_EXPR:
15579 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15580 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15582 case LSHIFT_EXPR:
15583 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15585 tree op1, t1;
15587 op1 = TREE_OPERAND (top, 1);
15588 /* const_binop may not detect overflow correctly,
15589 so check for it explicitly here. */
15590 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15591 > TREE_INT_CST_LOW (op1)
15592 && TREE_INT_CST_HIGH (op1) == 0
15593 && 0 != (t1 = fold_convert (type,
15594 const_binop (LSHIFT_EXPR,
15595 size_one_node,
15596 op1)))
15597 && !TREE_OVERFLOW (t1))
15598 return multiple_of_p (type, t1, bottom);
15600 return 0;
15602 case NOP_EXPR:
15603 /* Can't handle conversions from non-integral or wider integral type. */
15604 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15605 || (TYPE_PRECISION (type)
15606 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15607 return 0;
15609 /* .. fall through ... */
15611 case SAVE_EXPR:
15612 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15614 case COND_EXPR:
15615 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15616 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15618 case INTEGER_CST:
15619 if (TREE_CODE (bottom) != INTEGER_CST
15620 || integer_zerop (bottom)
15621 || (TYPE_UNSIGNED (type)
15622 && (tree_int_cst_sgn (top) < 0
15623 || tree_int_cst_sgn (bottom) < 0)))
15624 return 0;
15625 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15626 top, bottom));
15628 default:
15629 return 0;
15633 /* Return true if CODE or TYPE is known to be non-negative. */
15635 static bool
15636 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15638 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15639 && truth_value_p (code))
15640 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15641 have a signed:1 type (where the value is -1 and 0). */
15642 return true;
15643 return false;
15646 /* Return true if (CODE OP0) is known to be non-negative. If the return
15647 value is based on the assumption that signed overflow is undefined,
15648 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15649 *STRICT_OVERFLOW_P. */
15651 bool
15652 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15653 bool *strict_overflow_p)
15655 if (TYPE_UNSIGNED (type))
15656 return true;
15658 switch (code)
15660 case ABS_EXPR:
15661 /* We can't return 1 if flag_wrapv is set because
15662 ABS_EXPR<INT_MIN> = INT_MIN. */
15663 if (!INTEGRAL_TYPE_P (type))
15664 return true;
15665 if (TYPE_OVERFLOW_UNDEFINED (type))
15667 *strict_overflow_p = true;
15668 return true;
15670 break;
15672 case NON_LVALUE_EXPR:
15673 case FLOAT_EXPR:
15674 case FIX_TRUNC_EXPR:
15675 return tree_expr_nonnegative_warnv_p (op0,
15676 strict_overflow_p);
15678 case NOP_EXPR:
15680 tree inner_type = TREE_TYPE (op0);
15681 tree outer_type = type;
15683 if (TREE_CODE (outer_type) == REAL_TYPE)
15685 if (TREE_CODE (inner_type) == REAL_TYPE)
15686 return tree_expr_nonnegative_warnv_p (op0,
15687 strict_overflow_p);
15688 if (INTEGRAL_TYPE_P (inner_type))
15690 if (TYPE_UNSIGNED (inner_type))
15691 return true;
15692 return tree_expr_nonnegative_warnv_p (op0,
15693 strict_overflow_p);
15696 else if (INTEGRAL_TYPE_P (outer_type))
15698 if (TREE_CODE (inner_type) == REAL_TYPE)
15699 return tree_expr_nonnegative_warnv_p (op0,
15700 strict_overflow_p);
15701 if (INTEGRAL_TYPE_P (inner_type))
15702 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15703 && TYPE_UNSIGNED (inner_type);
15706 break;
15708 default:
15709 return tree_simple_nonnegative_warnv_p (code, type);
15712 /* We don't know sign of `t', so be conservative and return false. */
15713 return false;
15716 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15717 value is based on the assumption that signed overflow is undefined,
15718 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15719 *STRICT_OVERFLOW_P. */
15721 bool
15722 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15723 tree op1, bool *strict_overflow_p)
15725 if (TYPE_UNSIGNED (type))
15726 return true;
15728 switch (code)
15730 case POINTER_PLUS_EXPR:
15731 case PLUS_EXPR:
15732 if (FLOAT_TYPE_P (type))
15733 return (tree_expr_nonnegative_warnv_p (op0,
15734 strict_overflow_p)
15735 && tree_expr_nonnegative_warnv_p (op1,
15736 strict_overflow_p));
15738 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15739 both unsigned and at least 2 bits shorter than the result. */
15740 if (TREE_CODE (type) == INTEGER_TYPE
15741 && TREE_CODE (op0) == NOP_EXPR
15742 && TREE_CODE (op1) == NOP_EXPR)
15744 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15745 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15746 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15747 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15749 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15750 TYPE_PRECISION (inner2)) + 1;
15751 return prec < TYPE_PRECISION (type);
15754 break;
15756 case MULT_EXPR:
15757 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15759 /* x * x is always non-negative for floating point x
15760 or without overflow. */
15761 if (operand_equal_p (op0, op1, 0)
15762 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15763 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15765 if (TYPE_OVERFLOW_UNDEFINED (type))
15766 *strict_overflow_p = true;
15767 return true;
15771 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15772 both unsigned and their total bits is shorter than the result. */
15773 if (TREE_CODE (type) == INTEGER_TYPE
15774 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15775 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15777 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15778 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15779 : TREE_TYPE (op0);
15780 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15781 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15782 : TREE_TYPE (op1);
15784 bool unsigned0 = TYPE_UNSIGNED (inner0);
15785 bool unsigned1 = TYPE_UNSIGNED (inner1);
15787 if (TREE_CODE (op0) == INTEGER_CST)
15788 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15790 if (TREE_CODE (op1) == INTEGER_CST)
15791 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15793 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15794 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15796 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15797 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15798 : TYPE_PRECISION (inner0);
15800 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15801 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15802 : TYPE_PRECISION (inner1);
15804 return precision0 + precision1 < TYPE_PRECISION (type);
15807 return false;
15809 case BIT_AND_EXPR:
15810 case MAX_EXPR:
15811 return (tree_expr_nonnegative_warnv_p (op0,
15812 strict_overflow_p)
15813 || tree_expr_nonnegative_warnv_p (op1,
15814 strict_overflow_p));
15816 case BIT_IOR_EXPR:
15817 case BIT_XOR_EXPR:
15818 case MIN_EXPR:
15819 case RDIV_EXPR:
15820 case TRUNC_DIV_EXPR:
15821 case CEIL_DIV_EXPR:
15822 case FLOOR_DIV_EXPR:
15823 case ROUND_DIV_EXPR:
15824 return (tree_expr_nonnegative_warnv_p (op0,
15825 strict_overflow_p)
15826 && tree_expr_nonnegative_warnv_p (op1,
15827 strict_overflow_p));
15829 case TRUNC_MOD_EXPR:
15830 case CEIL_MOD_EXPR:
15831 case FLOOR_MOD_EXPR:
15832 case ROUND_MOD_EXPR:
15833 return tree_expr_nonnegative_warnv_p (op0,
15834 strict_overflow_p);
15835 default:
15836 return tree_simple_nonnegative_warnv_p (code, type);
15839 /* We don't know sign of `t', so be conservative and return false. */
15840 return false;
15843 /* Return true if T is known to be non-negative. If the return
15844 value is based on the assumption that signed overflow is undefined,
15845 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15846 *STRICT_OVERFLOW_P. */
15848 bool
15849 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15851 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15852 return true;
15854 switch (TREE_CODE (t))
15856 case INTEGER_CST:
15857 return tree_int_cst_sgn (t) >= 0;
15859 case REAL_CST:
15860 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15862 case FIXED_CST:
15863 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15865 case COND_EXPR:
15866 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15867 strict_overflow_p)
15868 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15869 strict_overflow_p));
15870 default:
15871 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15872 TREE_TYPE (t));
15874 /* We don't know sign of `t', so be conservative and return false. */
15875 return false;
15878 /* Return true if T is known to be non-negative. If the return
15879 value is based on the assumption that signed overflow is undefined,
15880 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15881 *STRICT_OVERFLOW_P. */
15883 bool
15884 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15885 tree arg0, tree arg1, bool *strict_overflow_p)
15887 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15888 switch (DECL_FUNCTION_CODE (fndecl))
15890 CASE_FLT_FN (BUILT_IN_ACOS):
15891 CASE_FLT_FN (BUILT_IN_ACOSH):
15892 CASE_FLT_FN (BUILT_IN_CABS):
15893 CASE_FLT_FN (BUILT_IN_COSH):
15894 CASE_FLT_FN (BUILT_IN_ERFC):
15895 CASE_FLT_FN (BUILT_IN_EXP):
15896 CASE_FLT_FN (BUILT_IN_EXP10):
15897 CASE_FLT_FN (BUILT_IN_EXP2):
15898 CASE_FLT_FN (BUILT_IN_FABS):
15899 CASE_FLT_FN (BUILT_IN_FDIM):
15900 CASE_FLT_FN (BUILT_IN_HYPOT):
15901 CASE_FLT_FN (BUILT_IN_POW10):
15902 CASE_INT_FN (BUILT_IN_FFS):
15903 CASE_INT_FN (BUILT_IN_PARITY):
15904 CASE_INT_FN (BUILT_IN_POPCOUNT):
15905 CASE_INT_FN (BUILT_IN_CLZ):
15906 CASE_INT_FN (BUILT_IN_CLRSB):
15907 case BUILT_IN_BSWAP32:
15908 case BUILT_IN_BSWAP64:
15909 /* Always true. */
15910 return true;
15912 CASE_FLT_FN (BUILT_IN_SQRT):
15913 /* sqrt(-0.0) is -0.0. */
15914 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15915 return true;
15916 return tree_expr_nonnegative_warnv_p (arg0,
15917 strict_overflow_p);
15919 CASE_FLT_FN (BUILT_IN_ASINH):
15920 CASE_FLT_FN (BUILT_IN_ATAN):
15921 CASE_FLT_FN (BUILT_IN_ATANH):
15922 CASE_FLT_FN (BUILT_IN_CBRT):
15923 CASE_FLT_FN (BUILT_IN_CEIL):
15924 CASE_FLT_FN (BUILT_IN_ERF):
15925 CASE_FLT_FN (BUILT_IN_EXPM1):
15926 CASE_FLT_FN (BUILT_IN_FLOOR):
15927 CASE_FLT_FN (BUILT_IN_FMOD):
15928 CASE_FLT_FN (BUILT_IN_FREXP):
15929 CASE_FLT_FN (BUILT_IN_ICEIL):
15930 CASE_FLT_FN (BUILT_IN_IFLOOR):
15931 CASE_FLT_FN (BUILT_IN_IRINT):
15932 CASE_FLT_FN (BUILT_IN_IROUND):
15933 CASE_FLT_FN (BUILT_IN_LCEIL):
15934 CASE_FLT_FN (BUILT_IN_LDEXP):
15935 CASE_FLT_FN (BUILT_IN_LFLOOR):
15936 CASE_FLT_FN (BUILT_IN_LLCEIL):
15937 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15938 CASE_FLT_FN (BUILT_IN_LLRINT):
15939 CASE_FLT_FN (BUILT_IN_LLROUND):
15940 CASE_FLT_FN (BUILT_IN_LRINT):
15941 CASE_FLT_FN (BUILT_IN_LROUND):
15942 CASE_FLT_FN (BUILT_IN_MODF):
15943 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15944 CASE_FLT_FN (BUILT_IN_RINT):
15945 CASE_FLT_FN (BUILT_IN_ROUND):
15946 CASE_FLT_FN (BUILT_IN_SCALB):
15947 CASE_FLT_FN (BUILT_IN_SCALBLN):
15948 CASE_FLT_FN (BUILT_IN_SCALBN):
15949 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15950 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15951 CASE_FLT_FN (BUILT_IN_SINH):
15952 CASE_FLT_FN (BUILT_IN_TANH):
15953 CASE_FLT_FN (BUILT_IN_TRUNC):
15954 /* True if the 1st argument is nonnegative. */
15955 return tree_expr_nonnegative_warnv_p (arg0,
15956 strict_overflow_p);
15958 CASE_FLT_FN (BUILT_IN_FMAX):
15959 /* True if the 1st OR 2nd arguments are nonnegative. */
15960 return (tree_expr_nonnegative_warnv_p (arg0,
15961 strict_overflow_p)
15962 || (tree_expr_nonnegative_warnv_p (arg1,
15963 strict_overflow_p)));
15965 CASE_FLT_FN (BUILT_IN_FMIN):
15966 /* True if the 1st AND 2nd arguments are nonnegative. */
15967 return (tree_expr_nonnegative_warnv_p (arg0,
15968 strict_overflow_p)
15969 && (tree_expr_nonnegative_warnv_p (arg1,
15970 strict_overflow_p)));
15972 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15973 /* True if the 2nd argument is nonnegative. */
15974 return tree_expr_nonnegative_warnv_p (arg1,
15975 strict_overflow_p);
15977 CASE_FLT_FN (BUILT_IN_POWI):
15978 /* True if the 1st argument is nonnegative or the second
15979 argument is an even integer. */
15980 if (TREE_CODE (arg1) == INTEGER_CST
15981 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15982 return true;
15983 return tree_expr_nonnegative_warnv_p (arg0,
15984 strict_overflow_p);
15986 CASE_FLT_FN (BUILT_IN_POW):
15987 /* True if the 1st argument is nonnegative or the second
15988 argument is an even integer valued real. */
15989 if (TREE_CODE (arg1) == REAL_CST)
15991 REAL_VALUE_TYPE c;
15992 HOST_WIDE_INT n;
15994 c = TREE_REAL_CST (arg1);
15995 n = real_to_integer (&c);
15996 if ((n & 1) == 0)
15998 REAL_VALUE_TYPE cint;
15999 real_from_integer (&cint, VOIDmode, n,
16000 n < 0 ? -1 : 0, 0);
16001 if (real_identical (&c, &cint))
16002 return true;
16005 return tree_expr_nonnegative_warnv_p (arg0,
16006 strict_overflow_p);
16008 default:
16009 break;
16011 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
16012 type);
16015 /* Return true if T is known to be non-negative. If the return
16016 value is based on the assumption that signed overflow is undefined,
16017 set *STRICT_OVERFLOW_P to true; otherwise, don't change
16018 *STRICT_OVERFLOW_P. */
16020 static bool
16021 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
16023 enum tree_code code = TREE_CODE (t);
16024 if (TYPE_UNSIGNED (TREE_TYPE (t)))
16025 return true;
16027 switch (code)
16029 case TARGET_EXPR:
16031 tree temp = TARGET_EXPR_SLOT (t);
16032 t = TARGET_EXPR_INITIAL (t);
16034 /* If the initializer is non-void, then it's a normal expression
16035 that will be assigned to the slot. */
16036 if (!VOID_TYPE_P (t))
16037 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
16039 /* Otherwise, the initializer sets the slot in some way. One common
16040 way is an assignment statement at the end of the initializer. */
16041 while (1)
16043 if (TREE_CODE (t) == BIND_EXPR)
16044 t = expr_last (BIND_EXPR_BODY (t));
16045 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
16046 || TREE_CODE (t) == TRY_CATCH_EXPR)
16047 t = expr_last (TREE_OPERAND (t, 0));
16048 else if (TREE_CODE (t) == STATEMENT_LIST)
16049 t = expr_last (t);
16050 else
16051 break;
16053 if (TREE_CODE (t) == MODIFY_EXPR
16054 && TREE_OPERAND (t, 0) == temp)
16055 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
16056 strict_overflow_p);
16058 return false;
16061 case CALL_EXPR:
16063 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
16064 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
16066 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
16067 get_callee_fndecl (t),
16068 arg0,
16069 arg1,
16070 strict_overflow_p);
16072 case COMPOUND_EXPR:
16073 case MODIFY_EXPR:
16074 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
16075 strict_overflow_p);
16076 case BIND_EXPR:
16077 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
16078 strict_overflow_p);
16079 case SAVE_EXPR:
16080 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
16081 strict_overflow_p);
16083 default:
16084 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
16085 TREE_TYPE (t));
16088 /* We don't know sign of `t', so be conservative and return false. */
16089 return false;
16092 /* Return true if T is known to be non-negative. If the return
16093 value is based on the assumption that signed overflow is undefined,
16094 set *STRICT_OVERFLOW_P to true; otherwise, don't change
16095 *STRICT_OVERFLOW_P. */
16097 bool
16098 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
16100 enum tree_code code;
16101 if (t == error_mark_node)
16102 return false;
16104 code = TREE_CODE (t);
16105 switch (TREE_CODE_CLASS (code))
16107 case tcc_binary:
16108 case tcc_comparison:
16109 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16110 TREE_TYPE (t),
16111 TREE_OPERAND (t, 0),
16112 TREE_OPERAND (t, 1),
16113 strict_overflow_p);
16115 case tcc_unary:
16116 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16117 TREE_TYPE (t),
16118 TREE_OPERAND (t, 0),
16119 strict_overflow_p);
16121 case tcc_constant:
16122 case tcc_declaration:
16123 case tcc_reference:
16124 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16126 default:
16127 break;
16130 switch (code)
16132 case TRUTH_AND_EXPR:
16133 case TRUTH_OR_EXPR:
16134 case TRUTH_XOR_EXPR:
16135 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16136 TREE_TYPE (t),
16137 TREE_OPERAND (t, 0),
16138 TREE_OPERAND (t, 1),
16139 strict_overflow_p);
16140 case TRUTH_NOT_EXPR:
16141 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16142 TREE_TYPE (t),
16143 TREE_OPERAND (t, 0),
16144 strict_overflow_p);
16146 case COND_EXPR:
16147 case CONSTRUCTOR:
16148 case OBJ_TYPE_REF:
16149 case ASSERT_EXPR:
16150 case ADDR_EXPR:
16151 case WITH_SIZE_EXPR:
16152 case SSA_NAME:
16153 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16155 default:
16156 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16160 /* Return true if `t' is known to be non-negative. Handle warnings
16161 about undefined signed overflow. */
16163 bool
16164 tree_expr_nonnegative_p (tree t)
16166 bool ret, strict_overflow_p;
16168 strict_overflow_p = false;
16169 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16170 if (strict_overflow_p)
16171 fold_overflow_warning (("assuming signed overflow does not occur when "
16172 "determining that expression is always "
16173 "non-negative"),
16174 WARN_STRICT_OVERFLOW_MISC);
16175 return ret;
16179 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16180 For floating point we further ensure that T is not denormal.
16181 Similar logic is present in nonzero_address in rtlanal.h.
16183 If the return value is based on the assumption that signed overflow
16184 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16185 change *STRICT_OVERFLOW_P. */
16187 bool
16188 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16189 bool *strict_overflow_p)
16191 switch (code)
16193 case ABS_EXPR:
16194 return tree_expr_nonzero_warnv_p (op0,
16195 strict_overflow_p);
16197 case NOP_EXPR:
16199 tree inner_type = TREE_TYPE (op0);
16200 tree outer_type = type;
16202 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16203 && tree_expr_nonzero_warnv_p (op0,
16204 strict_overflow_p));
16206 break;
16208 case NON_LVALUE_EXPR:
16209 return tree_expr_nonzero_warnv_p (op0,
16210 strict_overflow_p);
16212 default:
16213 break;
16216 return false;
16219 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16220 For floating point we further ensure that T is not denormal.
16221 Similar logic is present in nonzero_address in rtlanal.h.
16223 If the return value is based on the assumption that signed overflow
16224 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16225 change *STRICT_OVERFLOW_P. */
16227 bool
16228 tree_binary_nonzero_warnv_p (enum tree_code code,
16229 tree type,
16230 tree op0,
16231 tree op1, bool *strict_overflow_p)
16233 bool sub_strict_overflow_p;
16234 switch (code)
16236 case POINTER_PLUS_EXPR:
16237 case PLUS_EXPR:
16238 if (TYPE_OVERFLOW_UNDEFINED (type))
16240 /* With the presence of negative values it is hard
16241 to say something. */
16242 sub_strict_overflow_p = false;
16243 if (!tree_expr_nonnegative_warnv_p (op0,
16244 &sub_strict_overflow_p)
16245 || !tree_expr_nonnegative_warnv_p (op1,
16246 &sub_strict_overflow_p))
16247 return false;
16248 /* One of operands must be positive and the other non-negative. */
16249 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16250 overflows, on a twos-complement machine the sum of two
16251 nonnegative numbers can never be zero. */
16252 return (tree_expr_nonzero_warnv_p (op0,
16253 strict_overflow_p)
16254 || tree_expr_nonzero_warnv_p (op1,
16255 strict_overflow_p));
16257 break;
16259 case MULT_EXPR:
16260 if (TYPE_OVERFLOW_UNDEFINED (type))
16262 if (tree_expr_nonzero_warnv_p (op0,
16263 strict_overflow_p)
16264 && tree_expr_nonzero_warnv_p (op1,
16265 strict_overflow_p))
16267 *strict_overflow_p = true;
16268 return true;
16271 break;
16273 case MIN_EXPR:
16274 sub_strict_overflow_p = false;
16275 if (tree_expr_nonzero_warnv_p (op0,
16276 &sub_strict_overflow_p)
16277 && tree_expr_nonzero_warnv_p (op1,
16278 &sub_strict_overflow_p))
16280 if (sub_strict_overflow_p)
16281 *strict_overflow_p = true;
16283 break;
16285 case MAX_EXPR:
16286 sub_strict_overflow_p = false;
16287 if (tree_expr_nonzero_warnv_p (op0,
16288 &sub_strict_overflow_p))
16290 if (sub_strict_overflow_p)
16291 *strict_overflow_p = true;
16293 /* When both operands are nonzero, then MAX must be too. */
16294 if (tree_expr_nonzero_warnv_p (op1,
16295 strict_overflow_p))
16296 return true;
16298 /* MAX where operand 0 is positive is positive. */
16299 return tree_expr_nonnegative_warnv_p (op0,
16300 strict_overflow_p);
16302 /* MAX where operand 1 is positive is positive. */
16303 else if (tree_expr_nonzero_warnv_p (op1,
16304 &sub_strict_overflow_p)
16305 && tree_expr_nonnegative_warnv_p (op1,
16306 &sub_strict_overflow_p))
16308 if (sub_strict_overflow_p)
16309 *strict_overflow_p = true;
16310 return true;
16312 break;
16314 case BIT_IOR_EXPR:
16315 return (tree_expr_nonzero_warnv_p (op1,
16316 strict_overflow_p)
16317 || tree_expr_nonzero_warnv_p (op0,
16318 strict_overflow_p));
16320 default:
16321 break;
16324 return false;
16327 /* Return true when T is an address and is known to be nonzero.
16328 For floating point we further ensure that T is not denormal.
16329 Similar logic is present in nonzero_address in rtlanal.h.
16331 If the return value is based on the assumption that signed overflow
16332 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16333 change *STRICT_OVERFLOW_P. */
16335 bool
16336 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16338 bool sub_strict_overflow_p;
16339 switch (TREE_CODE (t))
16341 case INTEGER_CST:
16342 return !integer_zerop (t);
16344 case ADDR_EXPR:
16346 tree base = TREE_OPERAND (t, 0);
16347 if (!DECL_P (base))
16348 base = get_base_address (base);
16350 if (!base)
16351 return false;
16353 /* Weak declarations may link to NULL. Other things may also be NULL
16354 so protect with -fdelete-null-pointer-checks; but not variables
16355 allocated on the stack. */
16356 if (DECL_P (base)
16357 && (flag_delete_null_pointer_checks
16358 || (DECL_CONTEXT (base)
16359 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16360 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16361 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16363 /* Constants are never weak. */
16364 if (CONSTANT_CLASS_P (base))
16365 return true;
16367 return false;
16370 case COND_EXPR:
16371 sub_strict_overflow_p = false;
16372 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16373 &sub_strict_overflow_p)
16374 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16375 &sub_strict_overflow_p))
16377 if (sub_strict_overflow_p)
16378 *strict_overflow_p = true;
16379 return true;
16381 break;
16383 default:
16384 break;
16386 return false;
16389 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16390 attempt to fold the expression to a constant without modifying TYPE,
16391 OP0 or OP1.
16393 If the expression could be simplified to a constant, then return
16394 the constant. If the expression would not be simplified to a
16395 constant, then return NULL_TREE. */
16397 tree
16398 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16400 tree tem = fold_binary (code, type, op0, op1);
16401 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16404 /* Given the components of a unary expression CODE, TYPE and OP0,
16405 attempt to fold the expression to a constant without modifying
16406 TYPE or OP0.
16408 If the expression could be simplified to a constant, then return
16409 the constant. If the expression would not be simplified to a
16410 constant, then return NULL_TREE. */
16412 tree
16413 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16415 tree tem = fold_unary (code, type, op0);
16416 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16419 /* If EXP represents referencing an element in a constant string
16420 (either via pointer arithmetic or array indexing), return the
16421 tree representing the value accessed, otherwise return NULL. */
16423 tree
16424 fold_read_from_constant_string (tree exp)
16426 if ((TREE_CODE (exp) == INDIRECT_REF
16427 || TREE_CODE (exp) == ARRAY_REF)
16428 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16430 tree exp1 = TREE_OPERAND (exp, 0);
16431 tree index;
16432 tree string;
16433 location_t loc = EXPR_LOCATION (exp);
16435 if (TREE_CODE (exp) == INDIRECT_REF)
16436 string = string_constant (exp1, &index);
16437 else
16439 tree low_bound = array_ref_low_bound (exp);
16440 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16442 /* Optimize the special-case of a zero lower bound.
16444 We convert the low_bound to sizetype to avoid some problems
16445 with constant folding. (E.g. suppose the lower bound is 1,
16446 and its mode is QI. Without the conversion,l (ARRAY
16447 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16448 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16449 if (! integer_zerop (low_bound))
16450 index = size_diffop_loc (loc, index,
16451 fold_convert_loc (loc, sizetype, low_bound));
16453 string = exp1;
16456 if (string
16457 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16458 && TREE_CODE (string) == STRING_CST
16459 && TREE_CODE (index) == INTEGER_CST
16460 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16461 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16462 == MODE_INT)
16463 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16464 return build_int_cst_type (TREE_TYPE (exp),
16465 (TREE_STRING_POINTER (string)
16466 [TREE_INT_CST_LOW (index)]));
16468 return NULL;
16471 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16472 an integer constant, real, or fixed-point constant.
16474 TYPE is the type of the result. */
16476 static tree
16477 fold_negate_const (tree arg0, tree type)
16479 tree t = NULL_TREE;
16481 switch (TREE_CODE (arg0))
16483 case INTEGER_CST:
16485 double_int val = tree_to_double_int (arg0);
16486 bool overflow;
16487 val = val.neg_with_overflow (&overflow);
16488 t = force_fit_type_double (type, val, 1,
16489 (overflow | TREE_OVERFLOW (arg0))
16490 && !TYPE_UNSIGNED (type));
16491 break;
16494 case REAL_CST:
16495 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16496 break;
16498 case FIXED_CST:
16500 FIXED_VALUE_TYPE f;
16501 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16502 &(TREE_FIXED_CST (arg0)), NULL,
16503 TYPE_SATURATING (type));
16504 t = build_fixed (type, f);
16505 /* Propagate overflow flags. */
16506 if (overflow_p | TREE_OVERFLOW (arg0))
16507 TREE_OVERFLOW (t) = 1;
16508 break;
16511 default:
16512 gcc_unreachable ();
16515 return t;
16518 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16519 an integer constant or real constant.
16521 TYPE is the type of the result. */
16523 tree
16524 fold_abs_const (tree arg0, tree type)
16526 tree t = NULL_TREE;
16528 switch (TREE_CODE (arg0))
16530 case INTEGER_CST:
16532 double_int val = tree_to_double_int (arg0);
16534 /* If the value is unsigned or non-negative, then the absolute value
16535 is the same as the ordinary value. */
16536 if (TYPE_UNSIGNED (type)
16537 || !val.is_negative ())
16538 t = arg0;
16540 /* If the value is negative, then the absolute value is
16541 its negation. */
16542 else
16544 bool overflow;
16545 val = val.neg_with_overflow (&overflow);
16546 t = force_fit_type_double (type, val, -1,
16547 overflow | TREE_OVERFLOW (arg0));
16550 break;
16552 case REAL_CST:
16553 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16554 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16555 else
16556 t = arg0;
16557 break;
16559 default:
16560 gcc_unreachable ();
16563 return t;
16566 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16567 constant. TYPE is the type of the result. */
16569 static tree
16570 fold_not_const (const_tree arg0, tree type)
16572 double_int val;
16574 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16576 val = ~tree_to_double_int (arg0);
16577 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16580 /* Given CODE, a relational operator, the target type, TYPE and two
16581 constant operands OP0 and OP1, return the result of the
16582 relational operation. If the result is not a compile time
16583 constant, then return NULL_TREE. */
16585 static tree
16586 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16588 int result, invert;
16590 /* From here on, the only cases we handle are when the result is
16591 known to be a constant. */
16593 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16595 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16596 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16598 /* Handle the cases where either operand is a NaN. */
16599 if (real_isnan (c0) || real_isnan (c1))
16601 switch (code)
16603 case EQ_EXPR:
16604 case ORDERED_EXPR:
16605 result = 0;
16606 break;
16608 case NE_EXPR:
16609 case UNORDERED_EXPR:
16610 case UNLT_EXPR:
16611 case UNLE_EXPR:
16612 case UNGT_EXPR:
16613 case UNGE_EXPR:
16614 case UNEQ_EXPR:
16615 result = 1;
16616 break;
16618 case LT_EXPR:
16619 case LE_EXPR:
16620 case GT_EXPR:
16621 case GE_EXPR:
16622 case LTGT_EXPR:
16623 if (flag_trapping_math)
16624 return NULL_TREE;
16625 result = 0;
16626 break;
16628 default:
16629 gcc_unreachable ();
16632 return constant_boolean_node (result, type);
16635 return constant_boolean_node (real_compare (code, c0, c1), type);
16638 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16640 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16641 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16642 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16645 /* Handle equality/inequality of complex constants. */
16646 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16648 tree rcond = fold_relational_const (code, type,
16649 TREE_REALPART (op0),
16650 TREE_REALPART (op1));
16651 tree icond = fold_relational_const (code, type,
16652 TREE_IMAGPART (op0),
16653 TREE_IMAGPART (op1));
16654 if (code == EQ_EXPR)
16655 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16656 else if (code == NE_EXPR)
16657 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16658 else
16659 return NULL_TREE;
16662 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16664 unsigned count = VECTOR_CST_NELTS (op0);
16665 tree *elts = XALLOCAVEC (tree, count);
16666 gcc_assert (VECTOR_CST_NELTS (op1) == count
16667 && TYPE_VECTOR_SUBPARTS (type) == count);
16669 for (unsigned i = 0; i < count; i++)
16671 tree elem_type = TREE_TYPE (type);
16672 tree elem0 = VECTOR_CST_ELT (op0, i);
16673 tree elem1 = VECTOR_CST_ELT (op1, i);
16675 tree tem = fold_relational_const (code, elem_type,
16676 elem0, elem1);
16678 if (tem == NULL_TREE)
16679 return NULL_TREE;
16681 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16684 return build_vector (type, elts);
16687 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16689 To compute GT, swap the arguments and do LT.
16690 To compute GE, do LT and invert the result.
16691 To compute LE, swap the arguments, do LT and invert the result.
16692 To compute NE, do EQ and invert the result.
16694 Therefore, the code below must handle only EQ and LT. */
16696 if (code == LE_EXPR || code == GT_EXPR)
16698 tree tem = op0;
16699 op0 = op1;
16700 op1 = tem;
16701 code = swap_tree_comparison (code);
16704 /* Note that it is safe to invert for real values here because we
16705 have already handled the one case that it matters. */
16707 invert = 0;
16708 if (code == NE_EXPR || code == GE_EXPR)
16710 invert = 1;
16711 code = invert_tree_comparison (code, false);
16714 /* Compute a result for LT or EQ if args permit;
16715 Otherwise return T. */
16716 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16718 if (code == EQ_EXPR)
16719 result = tree_int_cst_equal (op0, op1);
16720 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16721 result = INT_CST_LT_UNSIGNED (op0, op1);
16722 else
16723 result = INT_CST_LT (op0, op1);
16725 else
16726 return NULL_TREE;
16728 if (invert)
16729 result ^= 1;
16730 return constant_boolean_node (result, type);
16733 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16734 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16735 itself. */
16737 tree
16738 fold_build_cleanup_point_expr (tree type, tree expr)
16740 /* If the expression does not have side effects then we don't have to wrap
16741 it with a cleanup point expression. */
16742 if (!TREE_SIDE_EFFECTS (expr))
16743 return expr;
16745 /* If the expression is a return, check to see if the expression inside the
16746 return has no side effects or the right hand side of the modify expression
16747 inside the return. If either don't have side effects set we don't need to
16748 wrap the expression in a cleanup point expression. Note we don't check the
16749 left hand side of the modify because it should always be a return decl. */
16750 if (TREE_CODE (expr) == RETURN_EXPR)
16752 tree op = TREE_OPERAND (expr, 0);
16753 if (!op || !TREE_SIDE_EFFECTS (op))
16754 return expr;
16755 op = TREE_OPERAND (op, 1);
16756 if (!TREE_SIDE_EFFECTS (op))
16757 return expr;
16760 return build1 (CLEANUP_POINT_EXPR, type, expr);
16763 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16764 of an indirection through OP0, or NULL_TREE if no simplification is
16765 possible. */
16767 tree
16768 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16770 tree sub = op0;
16771 tree subtype;
16773 STRIP_NOPS (sub);
16774 subtype = TREE_TYPE (sub);
16775 if (!POINTER_TYPE_P (subtype))
16776 return NULL_TREE;
16778 if (TREE_CODE (sub) == ADDR_EXPR)
16780 tree op = TREE_OPERAND (sub, 0);
16781 tree optype = TREE_TYPE (op);
16782 /* *&CONST_DECL -> to the value of the const decl. */
16783 if (TREE_CODE (op) == CONST_DECL)
16784 return DECL_INITIAL (op);
16785 /* *&p => p; make sure to handle *&"str"[cst] here. */
16786 if (type == optype)
16788 tree fop = fold_read_from_constant_string (op);
16789 if (fop)
16790 return fop;
16791 else
16792 return op;
16794 /* *(foo *)&fooarray => fooarray[0] */
16795 else if (TREE_CODE (optype) == ARRAY_TYPE
16796 && type == TREE_TYPE (optype)
16797 && (!in_gimple_form
16798 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16800 tree type_domain = TYPE_DOMAIN (optype);
16801 tree min_val = size_zero_node;
16802 if (type_domain && TYPE_MIN_VALUE (type_domain))
16803 min_val = TYPE_MIN_VALUE (type_domain);
16804 if (in_gimple_form
16805 && TREE_CODE (min_val) != INTEGER_CST)
16806 return NULL_TREE;
16807 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16808 NULL_TREE, NULL_TREE);
16810 /* *(foo *)&complexfoo => __real__ complexfoo */
16811 else if (TREE_CODE (optype) == COMPLEX_TYPE
16812 && type == TREE_TYPE (optype))
16813 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16814 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16815 else if (TREE_CODE (optype) == VECTOR_TYPE
16816 && type == TREE_TYPE (optype))
16818 tree part_width = TYPE_SIZE (type);
16819 tree index = bitsize_int (0);
16820 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16824 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16825 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16827 tree op00 = TREE_OPERAND (sub, 0);
16828 tree op01 = TREE_OPERAND (sub, 1);
16830 STRIP_NOPS (op00);
16831 if (TREE_CODE (op00) == ADDR_EXPR)
16833 tree op00type;
16834 op00 = TREE_OPERAND (op00, 0);
16835 op00type = TREE_TYPE (op00);
16837 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16838 if (TREE_CODE (op00type) == VECTOR_TYPE
16839 && type == TREE_TYPE (op00type))
16841 HOST_WIDE_INT offset = tree_to_shwi (op01);
16842 tree part_width = TYPE_SIZE (type);
16843 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16844 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16845 tree index = bitsize_int (indexi);
16847 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16848 return fold_build3_loc (loc,
16849 BIT_FIELD_REF, type, op00,
16850 part_width, index);
16853 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16854 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16855 && type == TREE_TYPE (op00type))
16857 tree size = TYPE_SIZE_UNIT (type);
16858 if (tree_int_cst_equal (size, op01))
16859 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16861 /* ((foo *)&fooarray)[1] => fooarray[1] */
16862 else if (TREE_CODE (op00type) == ARRAY_TYPE
16863 && type == TREE_TYPE (op00type))
16865 tree type_domain = TYPE_DOMAIN (op00type);
16866 tree min_val = size_zero_node;
16867 if (type_domain && TYPE_MIN_VALUE (type_domain))
16868 min_val = TYPE_MIN_VALUE (type_domain);
16869 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16870 TYPE_SIZE_UNIT (type));
16871 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16872 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16873 NULL_TREE, NULL_TREE);
16878 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16879 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16880 && type == TREE_TYPE (TREE_TYPE (subtype))
16881 && (!in_gimple_form
16882 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16884 tree type_domain;
16885 tree min_val = size_zero_node;
16886 sub = build_fold_indirect_ref_loc (loc, sub);
16887 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16888 if (type_domain && TYPE_MIN_VALUE (type_domain))
16889 min_val = TYPE_MIN_VALUE (type_domain);
16890 if (in_gimple_form
16891 && TREE_CODE (min_val) != INTEGER_CST)
16892 return NULL_TREE;
16893 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16894 NULL_TREE);
16897 return NULL_TREE;
16900 /* Builds an expression for an indirection through T, simplifying some
16901 cases. */
16903 tree
16904 build_fold_indirect_ref_loc (location_t loc, tree t)
16906 tree type = TREE_TYPE (TREE_TYPE (t));
16907 tree sub = fold_indirect_ref_1 (loc, type, t);
16909 if (sub)
16910 return sub;
16912 return build1_loc (loc, INDIRECT_REF, type, t);
16915 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16917 tree
16918 fold_indirect_ref_loc (location_t loc, tree t)
16920 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16922 if (sub)
16923 return sub;
16924 else
16925 return t;
16928 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16929 whose result is ignored. The type of the returned tree need not be
16930 the same as the original expression. */
16932 tree
16933 fold_ignored_result (tree t)
16935 if (!TREE_SIDE_EFFECTS (t))
16936 return integer_zero_node;
16938 for (;;)
16939 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16941 case tcc_unary:
16942 t = TREE_OPERAND (t, 0);
16943 break;
16945 case tcc_binary:
16946 case tcc_comparison:
16947 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16948 t = TREE_OPERAND (t, 0);
16949 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16950 t = TREE_OPERAND (t, 1);
16951 else
16952 return t;
16953 break;
16955 case tcc_expression:
16956 switch (TREE_CODE (t))
16958 case COMPOUND_EXPR:
16959 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16960 return t;
16961 t = TREE_OPERAND (t, 0);
16962 break;
16964 case COND_EXPR:
16965 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16966 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16967 return t;
16968 t = TREE_OPERAND (t, 0);
16969 break;
16971 default:
16972 return t;
16974 break;
16976 default:
16977 return t;
16981 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16982 This can only be applied to objects of a sizetype. */
16984 tree
16985 round_up_loc (location_t loc, tree value, int divisor)
16987 tree div = NULL_TREE;
16989 gcc_assert (divisor > 0);
16990 if (divisor == 1)
16991 return value;
16993 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16994 have to do anything. Only do this when we are not given a const,
16995 because in that case, this check is more expensive than just
16996 doing it. */
16997 if (TREE_CODE (value) != INTEGER_CST)
16999 div = build_int_cst (TREE_TYPE (value), divisor);
17001 if (multiple_of_p (TREE_TYPE (value), value, div))
17002 return value;
17005 /* If divisor is a power of two, simplify this to bit manipulation. */
17006 if (divisor == (divisor & -divisor))
17008 if (TREE_CODE (value) == INTEGER_CST)
17010 double_int val = tree_to_double_int (value);
17011 bool overflow_p;
17013 if ((val.low & (divisor - 1)) == 0)
17014 return value;
17016 overflow_p = TREE_OVERFLOW (value);
17017 val.low &= ~(divisor - 1);
17018 val.low += divisor;
17019 if (val.low == 0)
17021 val.high++;
17022 if (val.high == 0)
17023 overflow_p = true;
17026 return force_fit_type_double (TREE_TYPE (value), val,
17027 -1, overflow_p);
17029 else
17031 tree t;
17033 t = build_int_cst (TREE_TYPE (value), divisor - 1);
17034 value = size_binop_loc (loc, PLUS_EXPR, value, t);
17035 t = build_int_cst (TREE_TYPE (value), -divisor);
17036 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
17039 else
17041 if (!div)
17042 div = build_int_cst (TREE_TYPE (value), divisor);
17043 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
17044 value = size_binop_loc (loc, MULT_EXPR, value, div);
17047 return value;
17050 /* Likewise, but round down. */
17052 tree
17053 round_down_loc (location_t loc, tree value, int divisor)
17055 tree div = NULL_TREE;
17057 gcc_assert (divisor > 0);
17058 if (divisor == 1)
17059 return value;
17061 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
17062 have to do anything. Only do this when we are not given a const,
17063 because in that case, this check is more expensive than just
17064 doing it. */
17065 if (TREE_CODE (value) != INTEGER_CST)
17067 div = build_int_cst (TREE_TYPE (value), divisor);
17069 if (multiple_of_p (TREE_TYPE (value), value, div))
17070 return value;
17073 /* If divisor is a power of two, simplify this to bit manipulation. */
17074 if (divisor == (divisor & -divisor))
17076 tree t;
17078 t = build_int_cst (TREE_TYPE (value), -divisor);
17079 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
17081 else
17083 if (!div)
17084 div = build_int_cst (TREE_TYPE (value), divisor);
17085 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
17086 value = size_binop_loc (loc, MULT_EXPR, value, div);
17089 return value;
17092 /* Returns the pointer to the base of the object addressed by EXP and
17093 extracts the information about the offset of the access, storing it
17094 to PBITPOS and POFFSET. */
17096 static tree
17097 split_address_to_core_and_offset (tree exp,
17098 HOST_WIDE_INT *pbitpos, tree *poffset)
17100 tree core;
17101 enum machine_mode mode;
17102 int unsignedp, volatilep;
17103 HOST_WIDE_INT bitsize;
17104 location_t loc = EXPR_LOCATION (exp);
17106 if (TREE_CODE (exp) == ADDR_EXPR)
17108 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
17109 poffset, &mode, &unsignedp, &volatilep,
17110 false);
17111 core = build_fold_addr_expr_loc (loc, core);
17113 else
17115 core = exp;
17116 *pbitpos = 0;
17117 *poffset = NULL_TREE;
17120 return core;
17123 /* Returns true if addresses of E1 and E2 differ by a constant, false
17124 otherwise. If they do, E1 - E2 is stored in *DIFF. */
17126 bool
17127 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17129 tree core1, core2;
17130 HOST_WIDE_INT bitpos1, bitpos2;
17131 tree toffset1, toffset2, tdiff, type;
17133 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17134 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17136 if (bitpos1 % BITS_PER_UNIT != 0
17137 || bitpos2 % BITS_PER_UNIT != 0
17138 || !operand_equal_p (core1, core2, 0))
17139 return false;
17141 if (toffset1 && toffset2)
17143 type = TREE_TYPE (toffset1);
17144 if (type != TREE_TYPE (toffset2))
17145 toffset2 = fold_convert (type, toffset2);
17147 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17148 if (!cst_and_fits_in_hwi (tdiff))
17149 return false;
17151 *diff = int_cst_value (tdiff);
17153 else if (toffset1 || toffset2)
17155 /* If only one of the offsets is non-constant, the difference cannot
17156 be a constant. */
17157 return false;
17159 else
17160 *diff = 0;
17162 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17163 return true;
17166 /* Simplify the floating point expression EXP when the sign of the
17167 result is not significant. Return NULL_TREE if no simplification
17168 is possible. */
17170 tree
17171 fold_strip_sign_ops (tree exp)
17173 tree arg0, arg1;
17174 location_t loc = EXPR_LOCATION (exp);
17176 switch (TREE_CODE (exp))
17178 case ABS_EXPR:
17179 case NEGATE_EXPR:
17180 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17181 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17183 case MULT_EXPR:
17184 case RDIV_EXPR:
17185 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17186 return NULL_TREE;
17187 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17188 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17189 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17190 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17191 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17192 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17193 break;
17195 case COMPOUND_EXPR:
17196 arg0 = TREE_OPERAND (exp, 0);
17197 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17198 if (arg1)
17199 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17200 break;
17202 case COND_EXPR:
17203 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17204 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17205 if (arg0 || arg1)
17206 return fold_build3_loc (loc,
17207 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17208 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17209 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17210 break;
17212 case CALL_EXPR:
17214 const enum built_in_function fcode = builtin_mathfn_code (exp);
17215 switch (fcode)
17217 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17218 /* Strip copysign function call, return the 1st argument. */
17219 arg0 = CALL_EXPR_ARG (exp, 0);
17220 arg1 = CALL_EXPR_ARG (exp, 1);
17221 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17223 default:
17224 /* Strip sign ops from the argument of "odd" math functions. */
17225 if (negate_mathfn_p (fcode))
17227 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17228 if (arg0)
17229 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17231 break;
17234 break;
17236 default:
17237 break;
17239 return NULL_TREE;