1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
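/* For illustration only, a hypothetical caller of the entry points
   above (a sketch, assuming the usual convenience macros from tree.h,
   i.e. size_int and size_binop wrapping size_int_kind and
   size_binop_loc):

     tree four = size_int (4);
     tree sum = size_binop (PLUS_EXPR, four, size_int (8));

   Here SUM is the sizetype constant 12, computed at compile time;
   fold (expr) similarly returns a simplified tree, or EXPR itself
   when no simplification applies.  */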
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
72 /* Nonzero if we are folding constants inside an initializer; zero
73 otherwise. */
74 int folding_initializer = 0;
76 /* The following constants represent a bit based encoding of GCC's
77 comparison operators. This encoding simplifies transformations
78 on relational comparison operators, such as AND and OR. */
79 enum comparison_code {
80 COMPCODE_FALSE = 0,
81 COMPCODE_LT = 1,
82 COMPCODE_EQ = 2,
83 COMPCODE_LE = 3,
84 COMPCODE_GT = 4,
85 COMPCODE_LTGT = 5,
86 COMPCODE_GE = 6,
87 COMPCODE_ORD = 7,
88 COMPCODE_UNORD = 8,
89 COMPCODE_UNLT = 9,
90 COMPCODE_UNEQ = 10,
91 COMPCODE_UNLE = 11,
92 COMPCODE_UNGT = 12,
93 COMPCODE_NE = 13,
94 COMPCODE_UNGE = 14,
95 COMPCODE_TRUE = 15
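/* Worked example of the encoding above: each comparison is a bitmask
   over the outcomes LT (bit 0), EQ (bit 1), GT (bit 2) and UNORD
   (bit 3), so combining comparisons is plain bit arithmetic:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ

   i.e. (a <= b) OR (a == b) is (a <= b), and (a <= b) AND (a >= b)
   simplifies to (a == b).  */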
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
114 tree, tree, tree);
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116 HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
118 tree *, tree *);
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree optimize_minmax_comparison (location_t, enum tree_code,
130 tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (location_t,
134 enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static tree fold_mathfn_compare (location_t,
138 enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149 Otherwise, return LOC. */
151 static location_t
152 expr_location_or (tree t, location_t loc)
154 location_t tloc = EXPR_LOCATION (t);
155 return tloc == UNKNOWN_LOCATION ? loc : tloc;
158 /* Similar to protected_set_expr_location, but never modify X in place;
159 if the location can and needs to be set, unshare X first. */
161 static inline tree
162 protected_set_expr_location_unshare (tree x, location_t loc)
164 if (CAN_HAVE_LOCATION_P (x)
165 && EXPR_LOCATION (x) != loc
166 && !(TREE_CODE (x) == SAVE_EXPR
167 || TREE_CODE (x) == TARGET_EXPR
168 || TREE_CODE (x) == BIND_EXPR))
170 x = copy_node (x);
171 SET_EXPR_LOCATION (x, loc);
173 return x;
176 /* If ARG2 divides ARG1 with zero remainder, carries out the division
177 indicated by CODE and returns the quotient.
178 Otherwise returns NULL_TREE. */
180 tree
181 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
183 double_int quo, rem;
184 int uns;
186 /* The sign of the division is taken from operand two, which
187 does the correct thing for POINTER_PLUS_EXPR, where we want
188 a signed division. */
189 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
191 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
192 uns, code, &rem);
194 if (rem.is_zero ())
195 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
197 return NULL_TREE;
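/* Examples of the behavior above (the operands shown stand for
   hypothetical INTEGER_CST trees with those values):

     div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4)  -> 3
     div_if_zero_remainder (EXACT_DIV_EXPR, 13, 4)  -> NULL_TREE

   Since 13 % 4 != 0, the second call reports that no exact quotient
   exists.  */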
200 /* This is nonzero if we should defer warnings about undefined
201 overflow. This facility exists because these warnings are a
202 special case. The code to estimate loop iterations does not want
203 to issue any warnings, since it works with expressions which do not
204 occur in user code. Various bits of cleanup code call fold(), but
205 only use the result if it has certain characteristics (e.g., is a
206 constant); that code only wants to issue a warning if the result is
207 used. */
209 static int fold_deferring_overflow_warnings;
211 /* If a warning about undefined overflow is deferred, this is the
212 warning. Note that this may cause us to turn two warnings into
213 one, but that is fine since it is sufficient to only give one
214 warning per expression. */
216 static const char* fold_deferred_overflow_warning;
218 /* If a warning about undefined overflow is deferred, this is the
219 level at which the warning should be emitted. */
221 static enum warn_strict_overflow_code fold_deferred_overflow_code;
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
226 void
227 fold_defer_overflow_warnings (void)
229 ++fold_deferring_overflow_warnings;
232 /* Stop deferring overflow warnings. If there is a pending warning,
233 and ISSUE is true, then issue the warning if appropriate. STMT is
234 the statement with which the warning should be associated (used for
235 location information); STMT may be NULL. CODE is the level of the
236 warning--a warn_strict_overflow_code value. This function will use
237 the smaller of CODE and the deferred code when deciding whether to
238 issue the warning. CODE may be zero to mean to always use the
239 deferred code. */
241 void
242 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
244 const char *warnmsg;
245 location_t locus;
247 gcc_assert (fold_deferring_overflow_warnings > 0);
248 --fold_deferring_overflow_warnings;
249 if (fold_deferring_overflow_warnings > 0)
251 if (fold_deferred_overflow_warning != NULL
252 && code != 0
253 && code < (int) fold_deferred_overflow_code)
254 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
255 return;
258 warnmsg = fold_deferred_overflow_warning;
259 fold_deferred_overflow_warning = NULL;
261 if (!issue || warnmsg == NULL)
262 return;
264 if (gimple_no_warning_p (stmt))
265 return;
267 /* Use the smallest code level when deciding to issue the
268 warning. */
269 if (code == 0 || code > (int) fold_deferred_overflow_code)
270 code = fold_deferred_overflow_code;
272 if (!issue_strict_overflow_warning (code))
273 return;
275 if (stmt == NULL)
276 locus = input_location;
277 else
278 locus = gimple_location (stmt);
279 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
282 /* Stop deferring overflow warnings, ignoring any deferred
283 warnings. */
285 void
286 fold_undefer_and_ignore_overflow_warnings (void)
288 fold_undefer_overflow_warnings (false, NULL, 0);
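/* A hypothetical use of the deferral API above (a sketch; EXPR and
   STMT are placeholders supplied by the caller):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res != NULL_TREE && TREE_CONSTANT (res);
     fold_undefer_overflow_warnings (used, stmt, 0);

   i.e. a warning queued while folding is only issued if the caller
   actually keeps the folded result.  */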
291 /* Whether we are deferring overflow warnings. */
293 bool
294 fold_deferring_overflow_warnings_p (void)
296 return fold_deferring_overflow_warnings > 0;
299 /* This is called when we fold something based on the fact that signed
300 overflow is undefined. */
302 static void
303 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
305 if (fold_deferring_overflow_warnings > 0)
307 if (fold_deferred_overflow_warning == NULL
308 || wc < fold_deferred_overflow_code)
310 fold_deferred_overflow_warning = gmsgid;
311 fold_deferred_overflow_code = wc;
314 else if (issue_strict_overflow_warning (wc))
315 warning (OPT_Wstrict_overflow, gmsgid);
318 /* Return true if the built-in mathematical function specified by CODE
319 is odd, i.e. -f(x) == f(-x). */
321 static bool
322 negate_mathfn_p (enum built_in_function code)
324 switch (code)
326 CASE_FLT_FN (BUILT_IN_ASIN):
327 CASE_FLT_FN (BUILT_IN_ASINH):
328 CASE_FLT_FN (BUILT_IN_ATAN):
329 CASE_FLT_FN (BUILT_IN_ATANH):
330 CASE_FLT_FN (BUILT_IN_CASIN):
331 CASE_FLT_FN (BUILT_IN_CASINH):
332 CASE_FLT_FN (BUILT_IN_CATAN):
333 CASE_FLT_FN (BUILT_IN_CATANH):
334 CASE_FLT_FN (BUILT_IN_CBRT):
335 CASE_FLT_FN (BUILT_IN_CPROJ):
336 CASE_FLT_FN (BUILT_IN_CSIN):
337 CASE_FLT_FN (BUILT_IN_CSINH):
338 CASE_FLT_FN (BUILT_IN_CTAN):
339 CASE_FLT_FN (BUILT_IN_CTANH):
340 CASE_FLT_FN (BUILT_IN_ERF):
341 CASE_FLT_FN (BUILT_IN_LLROUND):
342 CASE_FLT_FN (BUILT_IN_LROUND):
343 CASE_FLT_FN (BUILT_IN_ROUND):
344 CASE_FLT_FN (BUILT_IN_SIN):
345 CASE_FLT_FN (BUILT_IN_SINH):
346 CASE_FLT_FN (BUILT_IN_TAN):
347 CASE_FLT_FN (BUILT_IN_TANH):
348 CASE_FLT_FN (BUILT_IN_TRUNC):
349 return true;
351 CASE_FLT_FN (BUILT_IN_LLRINT):
352 CASE_FLT_FN (BUILT_IN_LRINT):
353 CASE_FLT_FN (BUILT_IN_NEARBYINT):
354 CASE_FLT_FN (BUILT_IN_RINT):
355 return !flag_rounding_math;
357 default:
358 break;
360 return false;
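/* Example: sin is odd, so -sin(x) may be folded to sin(-x); cos is
   deliberately absent from the list above because cos(-x) == cos(x),
   not -cos(x).  The rint family is listed conditionally because
   rint(-x) == -rint(x) only holds for sign-symmetric rounding modes,
   hence the !flag_rounding_math guard.  */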
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
366 bool
367 may_negate_without_overflow_p (const_tree t)
369 unsigned HOST_WIDE_INT val;
370 unsigned int prec;
371 tree type;
373 gcc_assert (TREE_CODE (t) == INTEGER_CST);
375 type = TREE_TYPE (t);
376 if (TYPE_UNSIGNED (type))
377 return false;
379 prec = TYPE_PRECISION (type);
380 if (prec > HOST_BITS_PER_WIDE_INT)
382 if (TREE_INT_CST_LOW (t) != 0)
383 return true;
384 prec -= HOST_BITS_PER_WIDE_INT;
385 val = TREE_INT_CST_HIGH (t);
387 else
388 val = TREE_INT_CST_LOW (t);
389 if (prec < HOST_BITS_PER_WIDE_INT)
390 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
391 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
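/* Worked example for a hypothetical 32-bit signed type: the minimum
   value has only the sign bit set, i.e. VAL == (unsigned) 1 << 31,
   so the function returns false (negating INT_MIN overflows); for
   any other value, e.g. -2147483647, it returns true.  */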
394 /* Determine whether an expression T can be cheaply negated using
395 the function negate_expr without introducing undefined overflow. */
397 static bool
398 negate_expr_p (tree t)
400 tree type;
402 if (t == 0)
403 return false;
405 type = TREE_TYPE (t);
407 STRIP_SIGN_NOPS (t);
408 switch (TREE_CODE (t))
410 case INTEGER_CST:
411 if (TYPE_OVERFLOW_WRAPS (type))
412 return true;
414 /* Check that -CST will not overflow type. */
415 return may_negate_without_overflow_p (t);
416 case BIT_NOT_EXPR:
417 return (INTEGRAL_TYPE_P (type)
418 && TYPE_OVERFLOW_WRAPS (type));
420 case FIXED_CST:
421 case NEGATE_EXPR:
422 return true;
424 case REAL_CST:
425 /* We want to canonicalize to positive real constants. Pretend
426 that only negative ones can be easily negated. */
427 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
429 case COMPLEX_CST:
430 return negate_expr_p (TREE_REALPART (t))
431 && negate_expr_p (TREE_IMAGPART (t));
433 case VECTOR_CST:
435 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
436 return true;
438 int count = TYPE_VECTOR_SUBPARTS (type), i;
440 for (i = 0; i < count; i++)
441 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
442 return false;
444 return true;
447 case COMPLEX_EXPR:
448 return negate_expr_p (TREE_OPERAND (t, 0))
449 && negate_expr_p (TREE_OPERAND (t, 1));
451 case CONJ_EXPR:
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case PLUS_EXPR:
455 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
456 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
457 return false;
458 /* -(A + B) -> (-B) - A. */
459 if (negate_expr_p (TREE_OPERAND (t, 1))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1)))
462 return true;
463 /* -(A + B) -> (-A) - B. */
464 return negate_expr_p (TREE_OPERAND (t, 0));
466 case MINUS_EXPR:
467 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
468 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
469 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1));
473 case MULT_EXPR:
474 if (TYPE_UNSIGNED (TREE_TYPE (t)))
475 break;
477 /* Fall through. */
479 case RDIV_EXPR:
480 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
481 return negate_expr_p (TREE_OPERAND (t, 1))
482 || negate_expr_p (TREE_OPERAND (t, 0));
483 break;
485 case TRUNC_DIV_EXPR:
486 case ROUND_DIV_EXPR:
487 case EXACT_DIV_EXPR:
488 /* In general we can't negate A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. But if overflow is
491 undefined, we can negate, because - (INT_MIN / 1) is an
492 overflow. */
493 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
495 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
496 break;
497 /* If overflow is undefined then we have to be careful because
498 we ask whether it's ok to associate the negate with the
499 division, which is not ok, for example, for
500 -((a - b) / c), where (-(a - b)) / c may invoke undefined
501 overflow because of negating INT_MIN. So do not use
502 negate_expr_p here but open-code the two important cases. */
503 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
504 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
505 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
506 return true;
508 else if (negate_expr_p (TREE_OPERAND (t, 0)))
509 return true;
510 return negate_expr_p (TREE_OPERAND (t, 1));
512 case NOP_EXPR:
513 /* Negate -((double)float) as (double)(-float). */
514 if (TREE_CODE (type) == REAL_TYPE)
516 tree tem = strip_float_extensions (t);
517 if (tem != t)
518 return negate_expr_p (tem);
520 break;
522 case CALL_EXPR:
523 /* Negate -f(x) as f(-x). */
524 if (negate_mathfn_p (builtin_mathfn_code (t)))
525 return negate_expr_p (CALL_EXPR_ARG (t, 0));
526 break;
528 case RSHIFT_EXPR:
529 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
530 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
532 tree op1 = TREE_OPERAND (t, 1);
533 if (TREE_INT_CST_HIGH (op1) == 0
534 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
535 == TREE_INT_CST_LOW (op1))
536 return true;
538 break;
540 default:
541 break;
543 return false;
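/* For instance, with -ffast-math on doubles (signed zeros not
   honored), negate_expr_p (a - b) is true because -(a - b) can be
   rewritten as b - a without an explicit negation; with the default
   flags it is false, since -(0.0 - 0.0) and 0.0 - 0.0 differ in
   sign.  */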
546 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
547 simplification is possible.
548 If negate_expr_p would return true for T, NULL_TREE will never be
549 returned. */
551 static tree
552 fold_negate_expr (location_t loc, tree t)
554 tree type = TREE_TYPE (t);
555 tree tem;
557 switch (TREE_CODE (t))
559 /* Convert - (~A) to A + 1. */
560 case BIT_NOT_EXPR:
561 if (INTEGRAL_TYPE_P (type))
562 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
563 build_one_cst (type));
564 break;
566 case INTEGER_CST:
567 tem = fold_negate_const (t, type);
568 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
569 || !TYPE_OVERFLOW_TRAPS (type))
570 return tem;
571 break;
573 case REAL_CST:
574 tem = fold_negate_const (t, type);
575 /* Two's complement FP formats, such as c4x, may overflow. */
576 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
577 return tem;
578 break;
580 case FIXED_CST:
581 tem = fold_negate_const (t, type);
582 return tem;
584 case COMPLEX_CST:
586 tree rpart = negate_expr (TREE_REALPART (t));
587 tree ipart = negate_expr (TREE_IMAGPART (t));
589 if ((TREE_CODE (rpart) == REAL_CST
590 && TREE_CODE (ipart) == REAL_CST)
591 || (TREE_CODE (rpart) == INTEGER_CST
592 && TREE_CODE (ipart) == INTEGER_CST))
593 return build_complex (type, rpart, ipart);
595 break;
597 case VECTOR_CST:
599 int count = TYPE_VECTOR_SUBPARTS (type), i;
600 tree *elts = XALLOCAVEC (tree, count);
602 for (i = 0; i < count; i++)
604 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
605 if (elts[i] == NULL_TREE)
606 return NULL_TREE;
609 return build_vector (type, elts);
612 case COMPLEX_EXPR:
613 if (negate_expr_p (t))
614 return fold_build2_loc (loc, COMPLEX_EXPR, type,
615 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
616 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
617 break;
619 case CONJ_EXPR:
620 if (negate_expr_p (t))
621 return fold_build1_loc (loc, CONJ_EXPR, type,
622 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
623 break;
625 case NEGATE_EXPR:
626 return TREE_OPERAND (t, 0);
628 case PLUS_EXPR:
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
630 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
632 /* -(A + B) -> (-B) - A. */
633 if (negate_expr_p (TREE_OPERAND (t, 1))
634 && reorder_operands_p (TREE_OPERAND (t, 0),
635 TREE_OPERAND (t, 1)))
637 tem = negate_expr (TREE_OPERAND (t, 1));
638 return fold_build2_loc (loc, MINUS_EXPR, type,
639 tem, TREE_OPERAND (t, 0));
642 /* -(A + B) -> (-A) - B. */
643 if (negate_expr_p (TREE_OPERAND (t, 0)))
645 tem = negate_expr (TREE_OPERAND (t, 0));
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 tem, TREE_OPERAND (t, 1));
650 break;
652 case MINUS_EXPR:
653 /* - (A - B) -> B - A */
654 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
655 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
656 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
657 return fold_build2_loc (loc, MINUS_EXPR, type,
658 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
659 break;
661 case MULT_EXPR:
662 if (TYPE_UNSIGNED (type))
663 break;
665 /* Fall through. */
667 case RDIV_EXPR:
668 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
670 tem = TREE_OPERAND (t, 1);
671 if (negate_expr_p (tem))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 TREE_OPERAND (t, 0), negate_expr (tem));
674 tem = TREE_OPERAND (t, 0);
675 if (negate_expr_p (tem))
676 return fold_build2_loc (loc, TREE_CODE (t), type,
677 negate_expr (tem), TREE_OPERAND (t, 1));
679 break;
681 case TRUNC_DIV_EXPR:
682 case ROUND_DIV_EXPR:
683 case EXACT_DIV_EXPR:
684 /* In general we can't negate A / B, because if A is INT_MIN and
685 B is 1, we may turn this into INT_MIN / -1 which is undefined
686 and actually traps on some architectures. But if overflow is
687 undefined, we can negate, because - (INT_MIN / 1) is an
688 overflow. */
689 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
691 const char * const warnmsg = G_("assuming signed overflow does not "
692 "occur when negating a division");
693 tem = TREE_OPERAND (t, 1);
694 if (negate_expr_p (tem))
696 if (INTEGRAL_TYPE_P (type)
697 && (TREE_CODE (tem) != INTEGER_CST
698 || integer_onep (tem)))
699 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
700 return fold_build2_loc (loc, TREE_CODE (t), type,
701 TREE_OPERAND (t, 0), negate_expr (tem));
703 /* If overflow is undefined then we have to be careful because
704 we ask whether it's ok to associate the negate with the
705 division, which is not ok, for example, for
706 -((a - b) / c), where (-(a - b)) / c may invoke undefined
707 overflow because of negating INT_MIN. So do not use
708 negate_expr_p here but open-code the two important cases. */
709 tem = TREE_OPERAND (t, 0);
710 if ((INTEGRAL_TYPE_P (type)
711 && (TREE_CODE (tem) == NEGATE_EXPR
712 || (TREE_CODE (tem) == INTEGER_CST
713 && may_negate_without_overflow_p (tem))))
714 || !INTEGRAL_TYPE_P (type))
715 return fold_build2_loc (loc, TREE_CODE (t), type,
716 negate_expr (tem), TREE_OPERAND (t, 1));
718 break;
720 case NOP_EXPR:
721 /* Convert -((double)float) into (double)(-float). */
722 if (TREE_CODE (type) == REAL_TYPE)
724 tem = strip_float_extensions (t);
725 if (tem != t && negate_expr_p (tem))
726 return fold_convert_loc (loc, type, negate_expr (tem));
728 break;
730 case CALL_EXPR:
731 /* Negate -f(x) as f(-x). */
732 if (negate_mathfn_p (builtin_mathfn_code (t))
733 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
735 tree fndecl, arg;
737 fndecl = get_callee_fndecl (t);
738 arg = negate_expr (CALL_EXPR_ARG (t, 0));
739 return build_call_expr_loc (loc, fndecl, 1, arg);
741 break;
743 case RSHIFT_EXPR:
744 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
745 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
747 tree op1 = TREE_OPERAND (t, 1);
748 if (TREE_INT_CST_HIGH (op1) == 0
749 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
750 == TREE_INT_CST_LOW (op1))
752 tree ntype = TYPE_UNSIGNED (type)
753 ? signed_type_for (type)
754 : unsigned_type_for (type);
755 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
756 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
757 return fold_convert_loc (loc, type, temp);
760 break;
762 default:
763 break;
766 return NULL_TREE;
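/* Concrete folds performed above (hypothetical operands, flags and
   overflow semantics permitting):

     -(~a)      ->  a + 1        (BIT_NOT_EXPR case, integral type)
     -(a - b)   ->  b - a        (MINUS_EXPR case)
     -(x / 3)   ->  x / -3       (division case, constant divisor != 1)

   When no case applies, NULL_TREE is returned and negate_expr below
   wraps T in an explicit NEGATE_EXPR instead.  */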
769 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
770 negated in a simpler way. Also allow T to be NULL_TREE, in which case
771 return NULL_TREE. */
773 static tree
774 negate_expr (tree t)
776 tree type, tem;
777 location_t loc;
779 if (t == NULL_TREE)
780 return NULL_TREE;
782 loc = EXPR_LOCATION (t);
783 type = TREE_TYPE (t);
784 STRIP_SIGN_NOPS (t);
786 tem = fold_negate_expr (loc, t);
787 if (!tem)
788 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
789 return fold_convert_loc (loc, type, tem);
792 /* Split a tree IN into constant, literal, and variable parts that could be
793 combined with CODE to make IN. "constant" means an expression with
794 TREE_CONSTANT but that isn't an actual constant. CODE must be a
795 commutative arithmetic operation. Store the constant part into *CONP,
796 the literal in *LITP and return the variable part. If a part isn't
797 present, set it to null. If the tree does not decompose in this way,
798 return the entire tree as the variable part and the other parts as null.
800 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
801 case, we negate an operand that was subtracted. Except if it is a
802 literal for which we use *MINUS_LITP instead.
804 If NEGATE_P is true, we are negating all of IN, again except a literal
805 for which we use *MINUS_LITP instead.
807 If IN is itself a literal or constant, return it as appropriate.
809 Note that we do not guarantee that any of the three values will be the
810 same type as IN, but they will have the same signedness and mode. */
812 static tree
813 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
814 tree *minus_litp, int negate_p)
816 tree var = 0;
818 *conp = 0;
819 *litp = 0;
820 *minus_litp = 0;
822 /* Strip any conversions that don't change the machine mode or signedness. */
823 STRIP_SIGN_NOPS (in);
825 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
826 || TREE_CODE (in) == FIXED_CST)
827 *litp = in;
828 else if (TREE_CODE (in) == code
829 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
830 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
831 /* We can associate addition and subtraction together (even
832 though the C standard doesn't say so) for integers because
833 the value is not affected. For reals, the value might be
834 affected, so we can't. */
835 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
836 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
838 tree op0 = TREE_OPERAND (in, 0);
839 tree op1 = TREE_OPERAND (in, 1);
840 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
841 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
843 /* First see if either of the operands is a literal, then a constant. */
844 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
845 || TREE_CODE (op0) == FIXED_CST)
846 *litp = op0, op0 = 0;
847 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
848 || TREE_CODE (op1) == FIXED_CST)
849 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
851 if (op0 != 0 && TREE_CONSTANT (op0))
852 *conp = op0, op0 = 0;
853 else if (op1 != 0 && TREE_CONSTANT (op1))
854 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
856 /* If we haven't dealt with either operand, this is not a case we can
857 decompose. Otherwise, VAR is either of the ones remaining, if any. */
858 if (op0 != 0 && op1 != 0)
859 var = in;
860 else if (op0 != 0)
861 var = op0;
862 else
863 var = op1, neg_var_p = neg1_p;
865 /* Now do any needed negations. */
866 if (neg_litp_p)
867 *minus_litp = *litp, *litp = 0;
868 if (neg_conp_p)
869 *conp = negate_expr (*conp);
870 if (neg_var_p)
871 var = negate_expr (var);
873 else if (TREE_CODE (in) == BIT_NOT_EXPR
874 && code == PLUS_EXPR)
876 /* -X - 1 is folded to ~X, undo that here. */
877 *minus_litp = build_one_cst (TREE_TYPE (in));
878 var = negate_expr (TREE_OPERAND (in, 0));
880 else if (TREE_CONSTANT (in))
881 *conp = in;
882 else
883 var = in;
885 if (negate_p)
887 if (*litp)
888 *minus_litp = *litp, *litp = 0;
889 else if (*minus_litp)
890 *litp = *minus_litp, *minus_litp = 0;
891 *conp = negate_expr (*conp);
892 var = negate_expr (var);
895 return var;
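/* Worked example: splitting IN = x + 7 with CODE == PLUS_EXPR gives
   *LITP = 7, *CONP = NULL and returns x as the variable part; for
   IN = x - 7 the literal moves to *MINUS_LITP instead.  A
   hypothetical IN = a * b + c where c has TREE_CONSTANT set yields
   *CONP = c with variable part a * b.  */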
898 /* Re-associate trees split by the above function. T1 and T2 are
899 either expressions to associate or null. Return the new
900 expression, if any. LOC is the location of the new expression. If
901 we build an operation, do it in TYPE and with CODE. */
903 static tree
904 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
906 if (t1 == 0)
907 return t2;
908 else if (t2 == 0)
909 return t1;
911 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
912 try to fold this since we will have infinite recursion. But do
913 deal with any NEGATE_EXPRs. */
914 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
915 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
917 if (code == PLUS_EXPR)
919 if (TREE_CODE (t1) == NEGATE_EXPR)
920 return build2_loc (loc, MINUS_EXPR, type,
921 fold_convert_loc (loc, type, t2),
922 fold_convert_loc (loc, type,
923 TREE_OPERAND (t1, 0)));
924 else if (TREE_CODE (t2) == NEGATE_EXPR)
925 return build2_loc (loc, MINUS_EXPR, type,
926 fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type,
928 TREE_OPERAND (t2, 0)));
929 else if (integer_zerop (t2))
930 return fold_convert_loc (loc, type, t1);
932 else if (code == MINUS_EXPR)
934 if (integer_zerop (t2))
935 return fold_convert_loc (loc, type, t1);
938 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
942 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
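/* Example: re-associating the parts produced by split_tree above,
   associate_trees (loc, x, build_int_cst (type, 7), PLUS_EXPR, type)
   rebuilds x + 7, while a NEGATE_EXPR operand such as -y is turned
   back into the MINUS_EXPR x - y rather than x + (-y).  */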
946 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
947 for use in int_const_binop, size_binop and size_diffop. */
949 static bool
950 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
952 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
953 return false;
954 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
955 return false;
957 switch (code)
959 case LSHIFT_EXPR:
960 case RSHIFT_EXPR:
961 case LROTATE_EXPR:
962 case RROTATE_EXPR:
963 return true;
965 default:
966 break;
969 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
970 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
971 && TYPE_MODE (type1) == TYPE_MODE (type2);
975 /* Combine two integer constants ARG1 and ARG2 under operation CODE
976 to produce a new constant. Return NULL_TREE if we don't know how
977 to evaluate CODE at compile-time. */
979 static tree
980 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
981 int overflowable)
983 double_int op1, op2, res, tmp;
984 tree t;
985 tree type = TREE_TYPE (arg1);
986 bool uns = TYPE_UNSIGNED (type);
987 bool overflow = false;
989 op1 = tree_to_double_int (arg1);
990 op2 = tree_to_double_int (arg2);
992 switch (code)
994 case BIT_IOR_EXPR:
995 res = op1 | op2;
996 break;
998 case BIT_XOR_EXPR:
999 res = op1 ^ op2;
1000 break;
1002 case BIT_AND_EXPR:
1003 res = op1 & op2;
1004 break;
1006 case RSHIFT_EXPR:
1007 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1008 break;
1010 case LSHIFT_EXPR:
1011 /* It's unclear from the C standard whether shifts can overflow.
1012 The following code ignores overflow; perhaps a C standard
1013 interpretation ruling is needed. */
1014 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1015 break;
1017 case RROTATE_EXPR:
1018 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1019 break;
1021 case LROTATE_EXPR:
1022 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1023 break;
1025 case PLUS_EXPR:
1026 res = op1.add_with_sign (op2, false, &overflow);
1027 break;
1029 case MINUS_EXPR:
1030 res = op1.sub_with_overflow (op2, &overflow);
1031 break;
1033 case MULT_EXPR:
1034 res = op1.mul_with_sign (op2, false, &overflow);
1035 break;
1037 case MULT_HIGHPART_EXPR:
1038 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1040 bool dummy_overflow;
1041 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1042 return NULL_TREE;
1043 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1045 else
1047 bool dummy_overflow;
1048 /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
1049 is performed in twice the precision of the arguments. */
1050 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1051 res = tmp.rshift (TYPE_PRECISION (type),
1052 2 * TYPE_PRECISION (type), !uns);
1054 break;
1056 case TRUNC_DIV_EXPR:
1057 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1058 case EXACT_DIV_EXPR:
1059 /* This is a shortcut for a common special case. */
1060 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1061 && !TREE_OVERFLOW (arg1)
1062 && !TREE_OVERFLOW (arg2)
1063 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1065 if (code == CEIL_DIV_EXPR)
1066 op1.low += op2.low - 1;
1068 res.low = op1.low / op2.low, res.high = 0;
1069 break;
1072 /* ... fall through ... */
1074 case ROUND_DIV_EXPR:
1075 if (op2.is_zero ())
1076 return NULL_TREE;
1077 if (op2.is_one ())
1079 res = op1;
1080 break;
1082 if (op1 == op2 && !op1.is_zero ())
1084 res = double_int_one;
1085 break;
1087 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1088 break;
1090 case TRUNC_MOD_EXPR:
1091 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1092 /* This is a shortcut for a common special case. */
1093 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1094 && !TREE_OVERFLOW (arg1)
1095 && !TREE_OVERFLOW (arg2)
1096 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1098 if (code == CEIL_MOD_EXPR)
1099 op1.low += op2.low - 1;
1100 res.low = op1.low % op2.low, res.high = 0;
1101 break;
1104 /* ... fall through ... */
1106 case ROUND_MOD_EXPR:
1107 if (op2.is_zero ())
1108 return NULL_TREE;
1110 /* Check for the case of INT_MIN % -1 and return
1111 overflow and result = 0. The TImode case is handled properly
1112 in double-int. */
1113 if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
1114 && !uns
1115 && op2.is_minus_one ()
1116 && op1.high == (HOST_WIDE_INT) -1
1117 && (HOST_WIDE_INT) op1.low
1118 == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
1120 overflow = 1;
1121 res = double_int_zero;
1123 else
1124 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1125 break;
1127 case MIN_EXPR:
1128 res = op1.min (op2, uns);
1129 break;
1131 case MAX_EXPR:
1132 res = op1.max (op2, uns);
1133 break;
1135 default:
1136 return NULL_TREE;
1139 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1140 (!uns && overflow)
1141 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1143 return t;
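/* Examples of the constant arithmetic above (the operands shown
   stand for hypothetical 32-bit INTEGER_CST trees):

     int_const_binop (PLUS_EXPR, 2, 3)        -> 5
     int_const_binop (TRUNC_DIV_EXPR, 7, 2)   -> 3
     int_const_binop (PLUS_EXPR, INT_MAX, 1)  -> INT_MIN, with
                                                 TREE_OVERFLOW set

   The last case is flagged because the signed addition wrapped and
   OVERFLOWABLE is 1 for plain int_const_binop.  */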
1146 tree
1147 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1149 return int_const_binop_1 (code, arg1, arg2, 1);
1152 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1153 constant. We assume ARG1 and ARG2 have the same data type, or at least
1154 are the same kind of constant and the same machine mode. Return zero if
1155 combining the constants is not allowed in the current operating mode. */
1157 static tree
1158 const_binop (enum tree_code code, tree arg1, tree arg2)
1160 /* Sanity check for the recursive cases. */
1161 if (!arg1 || !arg2)
1162 return NULL_TREE;
1164 STRIP_NOPS (arg1);
1165 STRIP_NOPS (arg2);
1167 if (TREE_CODE (arg1) == INTEGER_CST)
1168 return int_const_binop (code, arg1, arg2);
1170 if (TREE_CODE (arg1) == REAL_CST)
1172 enum machine_mode mode;
1173 REAL_VALUE_TYPE d1;
1174 REAL_VALUE_TYPE d2;
1175 REAL_VALUE_TYPE value;
1176 REAL_VALUE_TYPE result;
1177 bool inexact;
1178 tree t, type;
1180 /* The following codes are handled by real_arithmetic. */
1181 switch (code)
1183 case PLUS_EXPR:
1184 case MINUS_EXPR:
1185 case MULT_EXPR:
1186 case RDIV_EXPR:
1187 case MIN_EXPR:
1188 case MAX_EXPR:
1189 break;
1191 default:
1192 return NULL_TREE;
1195 d1 = TREE_REAL_CST (arg1);
1196 d2 = TREE_REAL_CST (arg2);
1198 type = TREE_TYPE (arg1);
1199 mode = TYPE_MODE (type);
1201 /* Don't perform operation if we honor signaling NaNs and
1202 either operand is a NaN. */
1203 if (HONOR_SNANS (mode)
1204 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1205 return NULL_TREE;
1207 /* Don't perform operation if it would raise a division
1208 by zero exception. */
1209 if (code == RDIV_EXPR
1210 && REAL_VALUES_EQUAL (d2, dconst0)
1211 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1212 return NULL_TREE;
1214 /* If either operand is a NaN, just return it. Otherwise, set up
1215 for floating-point trap; we return an overflow. */
1216 if (REAL_VALUE_ISNAN (d1))
1217 return arg1;
1218 else if (REAL_VALUE_ISNAN (d2))
1219 return arg2;
1221 inexact = real_arithmetic (&value, code, &d1, &d2);
1222 real_convert (&result, mode, &value);
1224 /* Don't constant fold this floating point operation if
1225 the result has overflowed and flag_trapping_math is set. */
1226 if (flag_trapping_math
1227 && MODE_HAS_INFINITIES (mode)
1228 && REAL_VALUE_ISINF (result)
1229 && !REAL_VALUE_ISINF (d1)
1230 && !REAL_VALUE_ISINF (d2))
1231 return NULL_TREE;
1233 /* Don't constant fold this floating point operation if the
1234 result may depend upon the run-time rounding mode and
1235 flag_rounding_math is set, or if GCC's software emulation
1236 is unable to accurately represent the result. */
1237 if ((flag_rounding_math
1238 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1239 && (inexact || !real_identical (&result, &value)))
1240 return NULL_TREE;
1242 t = build_real (type, result);
1244 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1245 return t;
1248 if (TREE_CODE (arg1) == FIXED_CST)
1250 FIXED_VALUE_TYPE f1;
1251 FIXED_VALUE_TYPE f2;
1252 FIXED_VALUE_TYPE result;
1253 tree t, type;
1254 int sat_p;
1255 bool overflow_p;
1257 /* The following codes are handled by fixed_arithmetic. */
1258 switch (code)
1260 case PLUS_EXPR:
1261 case MINUS_EXPR:
1262 case MULT_EXPR:
1263 case TRUNC_DIV_EXPR:
1264 f2 = TREE_FIXED_CST (arg2);
1265 break;
1267 case LSHIFT_EXPR:
1268 case RSHIFT_EXPR:
1269 f2.data.high = TREE_INT_CST_HIGH (arg2);
1270 f2.data.low = TREE_INT_CST_LOW (arg2);
1271 f2.mode = SImode;
1272 break;
1274 default:
1275 return NULL_TREE;
1278 f1 = TREE_FIXED_CST (arg1);
1279 type = TREE_TYPE (arg1);
1280 sat_p = TYPE_SATURATING (type);
1281 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1282 t = build_fixed (type, result);
1283 /* Propagate overflow flags. */
1284 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1285 TREE_OVERFLOW (t) = 1;
1286 return t;
1289 if (TREE_CODE (arg1) == COMPLEX_CST)
1291 tree type = TREE_TYPE (arg1);
1292 tree r1 = TREE_REALPART (arg1);
1293 tree i1 = TREE_IMAGPART (arg1);
1294 tree r2 = TREE_REALPART (arg2);
1295 tree i2 = TREE_IMAGPART (arg2);
1296 tree real, imag;
1298 switch (code)
1300 case PLUS_EXPR:
1301 case MINUS_EXPR:
1302 real = const_binop (code, r1, r2);
1303 imag = const_binop (code, i1, i2);
1304 break;
1306 case MULT_EXPR:
1307 if (COMPLEX_FLOAT_TYPE_P (type))
1308 return do_mpc_arg2 (arg1, arg2, type,
1309 /* do_nonfinite= */ folding_initializer,
1310 mpc_mul);
1312 real = const_binop (MINUS_EXPR,
1313 const_binop (MULT_EXPR, r1, r2),
1314 const_binop (MULT_EXPR, i1, i2));
1315 imag = const_binop (PLUS_EXPR,
1316 const_binop (MULT_EXPR, r1, i2),
1317 const_binop (MULT_EXPR, i1, r2));
1318 break;
1320 case RDIV_EXPR:
1321 if (COMPLEX_FLOAT_TYPE_P (type))
1322 return do_mpc_arg2 (arg1, arg2, type,
1323 /* do_nonfinite= */ folding_initializer,
1324 mpc_div);
1325 /* Fallthru ... */
1326 case TRUNC_DIV_EXPR:
1327 case CEIL_DIV_EXPR:
1328 case FLOOR_DIV_EXPR:
1329 case ROUND_DIV_EXPR:
1330 if (flag_complex_method == 0)
1332 /* Keep this algorithm in sync with
1333 tree-complex.c:expand_complex_div_straight().
1335 Expand complex division to scalars, straightforward algorithm.
1336 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1337 t = br*br + bi*bi
1339 tree magsquared
1340 = const_binop (PLUS_EXPR,
1341 const_binop (MULT_EXPR, r2, r2),
1342 const_binop (MULT_EXPR, i2, i2));
1343 tree t1
1344 = const_binop (PLUS_EXPR,
1345 const_binop (MULT_EXPR, r1, r2),
1346 const_binop (MULT_EXPR, i1, i2));
1347 tree t2
1348 = const_binop (MINUS_EXPR,
1349 const_binop (MULT_EXPR, i1, r2),
1350 const_binop (MULT_EXPR, r1, i2));
1352 real = const_binop (code, t1, magsquared);
1353 imag = const_binop (code, t2, magsquared);
1355 else
1357 /* Keep this algorithm in sync with
1358 tree-complex.c:expand_complex_div_wide().
1360 Expand complex division to scalars, modified algorithm to minimize
1361 overflow with wide input ranges. */
1362 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1363 fold_abs_const (r2, TREE_TYPE (type)),
1364 fold_abs_const (i2, TREE_TYPE (type)));
1366 if (integer_nonzerop (compare))
1368 /* In the TRUE branch, we compute
1369 ratio = br/bi;
1370 div = (br * ratio) + bi;
1371 tr = (ar * ratio) + ai;
1372 ti = (ai * ratio) - ar;
1373 tr = tr / div;
1374 ti = ti / div; */
1375 tree ratio = const_binop (code, r2, i2);
1376 tree div = const_binop (PLUS_EXPR, i2,
1377 const_binop (MULT_EXPR, r2, ratio));
1378 real = const_binop (MULT_EXPR, r1, ratio);
1379 real = const_binop (PLUS_EXPR, real, i1);
1380 real = const_binop (code, real, div);
1382 imag = const_binop (MULT_EXPR, i1, ratio);
1383 imag = const_binop (MINUS_EXPR, imag, r1);
1384 imag = const_binop (code, imag, div);
1386 else
1388 /* In the FALSE branch, we compute
1389 ratio = d/c;
1390 divisor = (d * ratio) + c;
1391 tr = (b * ratio) + a;
1392 ti = b - (a * ratio);
1393 tr = tr / div;
1394 ti = ti / div; */
1395 tree ratio = const_binop (code, i2, r2);
1396 tree div = const_binop (PLUS_EXPR, r2,
1397 const_binop (MULT_EXPR, i2, ratio));
1399 real = const_binop (MULT_EXPR, i1, ratio);
1400 real = const_binop (PLUS_EXPR, real, r1);
1401 real = const_binop (code, real, div);
1403 imag = const_binop (MULT_EXPR, r1, ratio);
1404 imag = const_binop (MINUS_EXPR, i1, imag);
1405 imag = const_binop (code, imag, div);
1408 break;
1410 default:
1411 return NULL_TREE;
1414 if (real && imag)
1415 return build_complex (type, real, imag);
1418 if (TREE_CODE (arg1) == VECTOR_CST
1419 && TREE_CODE (arg2) == VECTOR_CST)
1421 tree type = TREE_TYPE (arg1);
1422 int count = TYPE_VECTOR_SUBPARTS (type), i;
1423 tree *elts = XALLOCAVEC (tree, count);
1425 for (i = 0; i < count; i++)
1427 tree elem1 = VECTOR_CST_ELT (arg1, i);
1428 tree elem2 = VECTOR_CST_ELT (arg2, i);
1430 elts[i] = const_binop (code, elem1, elem2);
1432 /* It is possible that const_binop cannot handle the given
1433 code and will return NULL_TREE. */
1434 if (elts[i] == NULL_TREE)
1435 return NULL_TREE;
1438 return build_vector (type, elts);
1441 /* Shifts allow a scalar offset for a vector. */
1442 if (TREE_CODE (arg1) == VECTOR_CST
1443 && TREE_CODE (arg2) == INTEGER_CST)
1445 tree type = TREE_TYPE (arg1);
1446 int count = TYPE_VECTOR_SUBPARTS (type), i;
1447 tree *elts = XALLOCAVEC (tree, count);
1449 if (code == VEC_RSHIFT_EXPR)
1451 if (!tree_fits_uhwi_p (arg2))
1452 return NULL_TREE;
1454 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1455 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1456 unsigned HOST_WIDE_INT innerc
1457 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1458 if (shiftc >= outerc || (shiftc % innerc) != 0)
1459 return NULL_TREE;
1460 int offset = shiftc / innerc;
1461 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1462 For reductions, if !BYTES_BIG_ENDIAN then the compiler picks the
1463 first vector element, but the last element if BYTES_BIG_ENDIAN. */
1464 if (BYTES_BIG_ENDIAN)
1465 offset = -offset;
1466 tree zero = build_zero_cst (TREE_TYPE (type));
1467 for (i = 0; i < count; i++)
1469 if (i + offset < 0 || i + offset >= count)
1470 elts[i] = zero;
1471 else
1472 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1475 else
1476 for (i = 0; i < count; i++)
1478 tree elem1 = VECTOR_CST_ELT (arg1, i);
1480 elts[i] = const_binop (code, elem1, arg2);
1482 /* It is possible that const_binop cannot handle the given
1483 code and will return NULL_TREE. */
1484 if (elts[i] == NULL_TREE)
1485 return NULL_TREE;
1488 return build_vector (type, elts);
1490 return NULL_TREE;
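/* Example: const_binop (MULT_EXPR, 3.0, 2.0) on REAL_CST operands
   folds to 6.0 via real_arithmetic, but a division by the REAL_CST
   0.0 is left unfolded (NULL_TREE) when -ftrapping-math is in
   effect, matching the guards above.  */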
1493 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1494 indicates which particular sizetype to create. */
1496 tree
1497 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1499 return build_int_cst (sizetype_tab[(int) kind], number);
1502 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1503 is a tree code. The type of the result is taken from the operands.
1504 Both must be equivalent integer types, ala int_binop_types_match_p.
1505 If the operands are constant, so is the result. */
1507 tree
1508 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1510 tree type = TREE_TYPE (arg0);
1512 if (arg0 == error_mark_node || arg1 == error_mark_node)
1513 return error_mark_node;
1515 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1516 TREE_TYPE (arg1)));
1518 /* Handle the special case of two integer constants faster. */
1519 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1521 /* And some specific cases even faster than that. */
1522 if (code == PLUS_EXPR)
1524 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1525 return arg1;
1526 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1527 return arg0;
1529 else if (code == MINUS_EXPR)
1531 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1532 return arg0;
1534 else if (code == MULT_EXPR)
1536 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1537 return arg1;
1540 /* Handle general case of two integer constants. For sizetype
1541 constant calculations we always want to know about overflow,
1542 even in the unsigned case. */
1543 return int_const_binop_1 (code, arg0, arg1, -1);
1546 return fold_build2_loc (loc, code, type, arg0, arg1);
1549 /* Given two values, either both of sizetype or both of bitsizetype,
1550 compute the difference between the two values. Return the value
1551 in signed type corresponding to the type of the operands. */
1553 tree
1554 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1556 tree type = TREE_TYPE (arg0);
1557 tree ctype;
1559 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1560 TREE_TYPE (arg1)));
1562 /* If the type is already signed, just do the simple thing. */
1563 if (!TYPE_UNSIGNED (type))
1564 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1566 if (type == sizetype)
1567 ctype = ssizetype;
1568 else if (type == bitsizetype)
1569 ctype = sbitsizetype;
1570 else
1571 ctype = signed_type_for (type);
1573 /* If either operand is not a constant, do the conversions to the signed
1574 type and subtract. The hardware will do the right thing with any
1575 overflow in the subtraction. */
1576 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1577 return size_binop_loc (loc, MINUS_EXPR,
1578 fold_convert_loc (loc, ctype, arg0),
1579 fold_convert_loc (loc, ctype, arg1));
1581 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1582 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1583 overflow) and negate (which can't either). Special-case a result
1584 of zero while we're here. */
1585 if (tree_int_cst_equal (arg0, arg1))
1586 return build_int_cst (ctype, 0);
1587 else if (tree_int_cst_lt (arg1, arg0))
1588 return fold_convert_loc (loc, ctype,
1589 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1590 else
1591 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1592 fold_convert_loc (loc, ctype,
1593 size_binop_loc (loc,
1594 MINUS_EXPR,
1595 arg1, arg0)));
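/* Worked example: for sizetype operands ARG0 == 3 and ARG1 == 5,
   ARG0 < ARG1, so the code subtracts the other way (5 - 3 == 2),
   converts to ssizetype and negates, yielding the ssizetype
   constant -2 without ever forming an unsigned wraparound value.  */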
1598 /* A subroutine of fold_convert_const handling conversions of an
1599 INTEGER_CST to another integer type. */
1601 static tree
1602 fold_convert_const_int_from_int (tree type, const_tree arg1)
1604 tree t;
1606 /* Given an integer constant, make a new constant with the new type,
1607 appropriately sign-extended or truncated. */
1608 t = force_fit_type_double (type, tree_to_double_int (arg1),
1609 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1610 (TREE_INT_CST_HIGH (arg1) < 0
1611 && (TYPE_UNSIGNED (type)
1612 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1613 | TREE_OVERFLOW (arg1));
1615 return t;
1618 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1619 to an integer type. */
1621 static tree
1622 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1624 int overflow = 0;
1625 tree t;
1627 /* The following code implements the floating point to integer
1628 conversion rules required by the Java Language Specification,
1629 that IEEE NaNs are mapped to zero and values that overflow
1630 the target precision saturate, i.e. values greater than
1631 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1632 are mapped to INT_MIN. These semantics are allowed by the
1633 C and C++ standards that simply state that the behavior of
1634 FP-to-integer conversion is unspecified upon overflow. */
1636 double_int val;
1637 REAL_VALUE_TYPE r;
1638 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1640 switch (code)
1642 case FIX_TRUNC_EXPR:
1643 real_trunc (&r, VOIDmode, &x);
1644 break;
1646 default:
1647 gcc_unreachable ();
1650 /* If R is NaN, return zero and show we have an overflow. */
1651 if (REAL_VALUE_ISNAN (r))
1653 overflow = 1;
1654 val = double_int_zero;
1657 /* See if R is less than the lower bound or greater than the
1658 upper bound. */
1660 if (! overflow)
1662 tree lt = TYPE_MIN_VALUE (type);
1663 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1664 if (REAL_VALUES_LESS (r, l))
1666 overflow = 1;
1667 val = tree_to_double_int (lt);
1671 if (! overflow)
1673 tree ut = TYPE_MAX_VALUE (type);
1674 if (ut)
1676 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1677 if (REAL_VALUES_LESS (u, r))
1679 overflow = 1;
1680 val = tree_to_double_int (ut);
1685 if (! overflow)
1686 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1688 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1689 return t;
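/* Examples of the saturating conversion above, for a hypothetical
   32-bit int target:

     (int) 3.7      ->  3          (FIX_TRUNC_EXPR truncates)
     (int) 1.0e30   ->  INT_MAX    (overflow flag set)
     (int) NaN      ->  0          (overflow flag set)
*/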
1692 /* A subroutine of fold_convert_const handling conversions of a
1693 FIXED_CST to an integer type. */
1695 static tree
1696 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1698 tree t;
1699 double_int temp, temp_trunc;
1700 unsigned int mode;
1702 /* Right shift FIXED_CST to temp by fbit. */
1703 temp = TREE_FIXED_CST (arg1).data;
1704 mode = TREE_FIXED_CST (arg1).mode;
1705 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1707 temp = temp.rshift (GET_MODE_FBIT (mode),
1708 HOST_BITS_PER_DOUBLE_INT,
1709 SIGNED_FIXED_POINT_MODE_P (mode));
1711 /* Left shift temp to temp_trunc by fbit. */
1712 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1713 HOST_BITS_PER_DOUBLE_INT,
1714 SIGNED_FIXED_POINT_MODE_P (mode));
1716 else
1718 temp = double_int_zero;
1719 temp_trunc = double_int_zero;
1722 /* If FIXED_CST is negative, we need to round the value toward 0:
1723 if the fractional bits are not zero, add 1 to temp. */
1724 if (SIGNED_FIXED_POINT_MODE_P (mode)
1725 && temp_trunc.is_negative ()
1726 && TREE_FIXED_CST (arg1).data != temp_trunc)
1727 temp += double_int_one;
1729 /* Given a fixed-point constant, make a new constant with the new type,
1730 appropriately sign-extended or truncated. */
1731 t = force_fit_type_double (type, temp, -1,
1732 (temp.is_negative ()
1733 && (TYPE_UNSIGNED (type)
1734 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1735 | TREE_OVERFLOW (arg1));
1737 return t;
1740 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1741 to another floating point type. */
1743 static tree
1744 fold_convert_const_real_from_real (tree type, const_tree arg1)
1746 REAL_VALUE_TYPE value;
1747 tree t;
1749 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1750 t = build_real (type, value);
1752 /* If converting an infinity or NAN to a representation that doesn't
1753 have one, set the overflow bit so that we can produce some kind of
1754 error message at the appropriate point if necessary. It's not the
1755 most user-friendly message, but it's better than nothing. */
1756 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1757 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1758 TREE_OVERFLOW (t) = 1;
1759 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1760 && !MODE_HAS_NANS (TYPE_MODE (type)))
1761 TREE_OVERFLOW (t) = 1;
1762 /* Regular overflow: the conversion produced an infinity in a mode
1763 that can't represent infinities. */
1764 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1765 && REAL_VALUE_ISINF (value)
1766 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1767 TREE_OVERFLOW (t) = 1;
1768 else
1769 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1770 return t;
1773 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1774 to a floating point type. */
1776 static tree
1777 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1779 REAL_VALUE_TYPE value;
1780 tree t;
1782 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1783 t = build_real (type, value);
1785 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1786 return t;
1789 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1790 to another fixed-point type. */
1792 static tree
1793 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1795 FIXED_VALUE_TYPE value;
1796 tree t;
1797 bool overflow_p;
1799 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1800 TYPE_SATURATING (type));
1801 t = build_fixed (type, value);
1803 /* Propagate overflow flags. */
1804 if (overflow_p | TREE_OVERFLOW (arg1))
1805 TREE_OVERFLOW (t) = 1;
1806 return t;
1809 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1810 to a fixed-point type. */
1812 static tree
1813 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1815 FIXED_VALUE_TYPE value;
1816 tree t;
1817 bool overflow_p;
1819 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1820 TREE_INT_CST (arg1),
1821 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1822 TYPE_SATURATING (type));
1823 t = build_fixed (type, value);
1825 /* Propagate overflow flags. */
1826 if (overflow_p | TREE_OVERFLOW (arg1))
1827 TREE_OVERFLOW (t) = 1;
1828 return t;
1831 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1832 to a fixed-point type. */
1834 static tree
1835 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1837 FIXED_VALUE_TYPE value;
1838 tree t;
1839 bool overflow_p;
1841 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1842 &TREE_REAL_CST (arg1),
1843 TYPE_SATURATING (type));
1844 t = build_fixed (type, value);
1846 /* Propagate overflow flags. */
1847 if (overflow_p | TREE_OVERFLOW (arg1))
1848 TREE_OVERFLOW (t) = 1;
1849 return t;
1852 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1853 type TYPE. If no simplification can be done return NULL_TREE. */
1855 static tree
1856 fold_convert_const (enum tree_code code, tree type, tree arg1)
1858 if (TREE_TYPE (arg1) == type)
1859 return arg1;
1861 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1862 || TREE_CODE (type) == OFFSET_TYPE)
1864 if (TREE_CODE (arg1) == INTEGER_CST)
1865 return fold_convert_const_int_from_int (type, arg1);
1866 else if (TREE_CODE (arg1) == REAL_CST)
1867 return fold_convert_const_int_from_real (code, type, arg1);
1868 else if (TREE_CODE (arg1) == FIXED_CST)
1869 return fold_convert_const_int_from_fixed (type, arg1);
1871 else if (TREE_CODE (type) == REAL_TYPE)
1873 if (TREE_CODE (arg1) == INTEGER_CST)
1874 return build_real_from_int_cst (type, arg1);
1875 else if (TREE_CODE (arg1) == REAL_CST)
1876 return fold_convert_const_real_from_real (type, arg1);
1877 else if (TREE_CODE (arg1) == FIXED_CST)
1878 return fold_convert_const_real_from_fixed (type, arg1);
1880 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1882 if (TREE_CODE (arg1) == FIXED_CST)
1883 return fold_convert_const_fixed_from_fixed (type, arg1);
1884 else if (TREE_CODE (arg1) == INTEGER_CST)
1885 return fold_convert_const_fixed_from_int (type, arg1);
1886 else if (TREE_CODE (arg1) == REAL_CST)
1887 return fold_convert_const_fixed_from_real (type, arg1);
1889 return NULL_TREE;
1892 /* Construct a vector of zero elements of vector type TYPE. */
1894 static tree
1895 build_zero_vector (tree type)
1897 tree t;
1899 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1900 return build_vector_from_val (type, t);
1903 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1905 bool
1906 fold_convertible_p (const_tree type, const_tree arg)
1908 tree orig = TREE_TYPE (arg);
1910 if (type == orig)
1911 return true;
1913 if (TREE_CODE (arg) == ERROR_MARK
1914 || TREE_CODE (type) == ERROR_MARK
1915 || TREE_CODE (orig) == ERROR_MARK)
1916 return false;
1918 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1919 return true;
1921 switch (TREE_CODE (type))
1923 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1924 case POINTER_TYPE: case REFERENCE_TYPE:
1925 case OFFSET_TYPE:
1926 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1927 || TREE_CODE (orig) == OFFSET_TYPE)
1928 return true;
1929 return (TREE_CODE (orig) == VECTOR_TYPE
1930 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1932 case REAL_TYPE:
1933 case FIXED_POINT_TYPE:
1934 case COMPLEX_TYPE:
1935 case VECTOR_TYPE:
1936 case VOID_TYPE:
1937 return TREE_CODE (type) == TREE_CODE (orig);
1939 default:
1940 return false;
1944 /* Convert expression ARG to type TYPE. Used by the middle-end for
1945 simple conversions in preference to calling the front-end's convert. */
1947 tree
1948 fold_convert_loc (location_t loc, tree type, tree arg)
1950 tree orig = TREE_TYPE (arg);
1951 tree tem;
1953 if (type == orig)
1954 return arg;
1956 if (TREE_CODE (arg) == ERROR_MARK
1957 || TREE_CODE (type) == ERROR_MARK
1958 || TREE_CODE (orig) == ERROR_MARK)
1959 return error_mark_node;
1961 switch (TREE_CODE (type))
1963 case POINTER_TYPE:
1964 case REFERENCE_TYPE:
1965 /* Handle conversions between pointers to different address spaces. */
1966 if (POINTER_TYPE_P (orig)
1967 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1968 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1969 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1970 /* fall through */
1972 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1973 case OFFSET_TYPE:
1974 if (TREE_CODE (arg) == INTEGER_CST)
1976 tem = fold_convert_const (NOP_EXPR, type, arg);
1977 if (tem != NULL_TREE)
1978 return tem;
1980 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1981 || TREE_CODE (orig) == OFFSET_TYPE)
1982 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1983 if (TREE_CODE (orig) == COMPLEX_TYPE)
1984 return fold_convert_loc (loc, type,
1985 fold_build1_loc (loc, REALPART_EXPR,
1986 TREE_TYPE (orig), arg));
1987 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1988 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1989 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1991 case REAL_TYPE:
1992 if (TREE_CODE (arg) == INTEGER_CST)
1994 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1995 if (tem != NULL_TREE)
1996 return tem;
1998 else if (TREE_CODE (arg) == REAL_CST)
2000 tem = fold_convert_const (NOP_EXPR, type, arg);
2001 if (tem != NULL_TREE)
2002 return tem;
2004 else if (TREE_CODE (arg) == FIXED_CST)
2006 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2007 if (tem != NULL_TREE)
2008 return tem;
2011 switch (TREE_CODE (orig))
2013 case INTEGER_TYPE:
2014 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2015 case POINTER_TYPE: case REFERENCE_TYPE:
2016 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2018 case REAL_TYPE:
2019 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2021 case FIXED_POINT_TYPE:
2022 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2024 case COMPLEX_TYPE:
2025 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert_loc (loc, type, tem);
2028 default:
2029 gcc_unreachable ();
2032 case FIXED_POINT_TYPE:
2033 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2034 || TREE_CODE (arg) == REAL_CST)
2036 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2037 if (tem != NULL_TREE)
2038 goto fold_convert_exit;
2041 switch (TREE_CODE (orig))
2043 case FIXED_POINT_TYPE:
2044 case INTEGER_TYPE:
2045 case ENUMERAL_TYPE:
2046 case BOOLEAN_TYPE:
2047 case REAL_TYPE:
2048 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2050 case COMPLEX_TYPE:
2051 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2052 return fold_convert_loc (loc, type, tem);
2054 default:
2055 gcc_unreachable ();
2058 case COMPLEX_TYPE:
2059 switch (TREE_CODE (orig))
2061 case INTEGER_TYPE:
2062 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2063 case POINTER_TYPE: case REFERENCE_TYPE:
2064 case REAL_TYPE:
2065 case FIXED_POINT_TYPE:
2066 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2067 fold_convert_loc (loc, TREE_TYPE (type), arg),
2068 fold_convert_loc (loc, TREE_TYPE (type),
2069 integer_zero_node));
2070 case COMPLEX_TYPE:
2072 tree rpart, ipart;
2074 if (TREE_CODE (arg) == COMPLEX_EXPR)
2076 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2077 TREE_OPERAND (arg, 0));
2078 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2079 TREE_OPERAND (arg, 1));
2080 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2083 arg = save_expr (arg);
2084 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2085 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2086 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2087 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2088 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2091 default:
2092 gcc_unreachable ();
2095 case VECTOR_TYPE:
2096 if (integer_zerop (arg))
2097 return build_zero_vector (type);
2098 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2099 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2100 || TREE_CODE (orig) == VECTOR_TYPE);
2101 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2103 case VOID_TYPE:
2104 tem = fold_ignored_result (arg);
2105 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2107 default:
2108 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2109 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2110 gcc_unreachable ();
2112 fold_convert_exit:
2113 protected_set_expr_location_unshare (tem, loc);
2114 return tem;
2117 /* Return false if expr can be assumed not to be an lvalue, true
2118 otherwise. */
2120 static bool
2121 maybe_lvalue_p (const_tree x)
2123 /* We only need to wrap lvalue tree codes. */
2124 switch (TREE_CODE (x))
2126 case VAR_DECL:
2127 case PARM_DECL:
2128 case RESULT_DECL:
2129 case LABEL_DECL:
2130 case FUNCTION_DECL:
2131 case SSA_NAME:
2133 case COMPONENT_REF:
2134 case MEM_REF:
2135 case INDIRECT_REF:
2136 case ARRAY_REF:
2137 case ARRAY_RANGE_REF:
2138 case BIT_FIELD_REF:
2139 case OBJ_TYPE_REF:
2141 case REALPART_EXPR:
2142 case IMAGPART_EXPR:
2143 case PREINCREMENT_EXPR:
2144 case PREDECREMENT_EXPR:
2145 case SAVE_EXPR:
2146 case TRY_CATCH_EXPR:
2147 case WITH_CLEANUP_EXPR:
2148 case COMPOUND_EXPR:
2149 case MODIFY_EXPR:
2150 case TARGET_EXPR:
2151 case COND_EXPR:
2152 case BIND_EXPR:
2153 break;
2155 default:
2156 /* Assume the worst for front-end tree codes. */
2157 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2158 break;
2159 return false;
2162 return true;
2165 /* Return an expr equal to X but certainly not valid as an lvalue. */
2167 tree
2168 non_lvalue_loc (location_t loc, tree x)
2170 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2171 us. */
2172 if (in_gimple_form)
2173 return x;
2175 if (! maybe_lvalue_p (x))
2176 return x;
2177 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2180 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2181 Zero means allow extended lvalues. */
2183 int pedantic_lvalues;
2185 /* When pedantic, return an expr equal to X but certainly not valid as a
2186 pedantic lvalue. Otherwise, return X. */
2188 static tree
2189 pedantic_non_lvalue_loc (location_t loc, tree x)
2191 if (pedantic_lvalues)
2192 return non_lvalue_loc (loc, x);
2194 return protected_set_expr_location_unshare (x, loc);
2197 /* Given a tree comparison code, return the code that is the logical inverse.
2198 It is generally not safe to do this for floating-point comparisons, except
2199 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2200 ERROR_MARK in this case. */
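/* Worked example (not part of the original source): without NaNs,
   inverting LT_EXPR yields GE_EXPR, so !(x < y) becomes x >= y. When
   NaNs are honored, the inverse of x < y is instead UNGE_EXPR, since
   both tests must be false when either operand is NaN; and if
   -ftrapping-math is also in effect we return ERROR_MARK rather than
   change which inputs trap. */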
2202 enum tree_code
2203 invert_tree_comparison (enum tree_code code, bool honor_nans)
2205 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2206 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2207 return ERROR_MARK;
2209 switch (code)
2211 case EQ_EXPR:
2212 return NE_EXPR;
2213 case NE_EXPR:
2214 return EQ_EXPR;
2215 case GT_EXPR:
2216 return honor_nans ? UNLE_EXPR : LE_EXPR;
2217 case GE_EXPR:
2218 return honor_nans ? UNLT_EXPR : LT_EXPR;
2219 case LT_EXPR:
2220 return honor_nans ? UNGE_EXPR : GE_EXPR;
2221 case LE_EXPR:
2222 return honor_nans ? UNGT_EXPR : GT_EXPR;
2223 case LTGT_EXPR:
2224 return UNEQ_EXPR;
2225 case UNEQ_EXPR:
2226 return LTGT_EXPR;
2227 case UNGT_EXPR:
2228 return LE_EXPR;
2229 case UNGE_EXPR:
2230 return LT_EXPR;
2231 case UNLT_EXPR:
2232 return GE_EXPR;
2233 case UNLE_EXPR:
2234 return GT_EXPR;
2235 case ORDERED_EXPR:
2236 return UNORDERED_EXPR;
2237 case UNORDERED_EXPR:
2238 return ORDERED_EXPR;
2239 default:
2240 gcc_unreachable ();
2244 /* Similar, but return the comparison that results if the operands are
2245 swapped. This is safe for floating-point. */
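/* Example (not part of the original source): swapping the operands
   of x < y gives y > x, so LT_EXPR maps to GT_EXPR; symmetric codes
   such as EQ_EXPR, NE_EXPR and UNORDERED_EXPR map to themselves.
   Unlike inversion, swapping is always safe for IEEE values. */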
2247 enum tree_code
2248 swap_tree_comparison (enum tree_code code)
2250 switch (code)
2252 case EQ_EXPR:
2253 case NE_EXPR:
2254 case ORDERED_EXPR:
2255 case UNORDERED_EXPR:
2256 case LTGT_EXPR:
2257 case UNEQ_EXPR:
2258 return code;
2259 case GT_EXPR:
2260 return LT_EXPR;
2261 case GE_EXPR:
2262 return LE_EXPR;
2263 case LT_EXPR:
2264 return GT_EXPR;
2265 case LE_EXPR:
2266 return GE_EXPR;
2267 case UNGT_EXPR:
2268 return UNLT_EXPR;
2269 case UNGE_EXPR:
2270 return UNLE_EXPR;
2271 case UNLT_EXPR:
2272 return UNGT_EXPR;
2273 case UNLE_EXPR:
2274 return UNGE_EXPR;
2275 default:
2276 gcc_unreachable ();
2281 /* Convert a comparison tree code from an enum tree_code representation
2282 into a compcode bit-based encoding. This function is the inverse of
2283 compcode_to_comparison. */
2285 static enum comparison_code
2286 comparison_to_compcode (enum tree_code code)
2288 switch (code)
2290 case LT_EXPR:
2291 return COMPCODE_LT;
2292 case EQ_EXPR:
2293 return COMPCODE_EQ;
2294 case LE_EXPR:
2295 return COMPCODE_LE;
2296 case GT_EXPR:
2297 return COMPCODE_GT;
2298 case NE_EXPR:
2299 return COMPCODE_NE;
2300 case GE_EXPR:
2301 return COMPCODE_GE;
2302 case ORDERED_EXPR:
2303 return COMPCODE_ORD;
2304 case UNORDERED_EXPR:
2305 return COMPCODE_UNORD;
2306 case UNLT_EXPR:
2307 return COMPCODE_UNLT;
2308 case UNEQ_EXPR:
2309 return COMPCODE_UNEQ;
2310 case UNLE_EXPR:
2311 return COMPCODE_UNLE;
2312 case UNGT_EXPR:
2313 return COMPCODE_UNGT;
2314 case LTGT_EXPR:
2315 return COMPCODE_LTGT;
2316 case UNGE_EXPR:
2317 return COMPCODE_UNGE;
2318 default:
2319 gcc_unreachable ();
2323 /* Convert a compcode bit-based encoding of a comparison operator back
2324 to GCC's enum tree_code representation. This function is the
2325 inverse of comparison_to_compcode. */
2327 static enum tree_code
2328 compcode_to_comparison (enum comparison_code code)
2330 switch (code)
2332 case COMPCODE_LT:
2333 return LT_EXPR;
2334 case COMPCODE_EQ:
2335 return EQ_EXPR;
2336 case COMPCODE_LE:
2337 return LE_EXPR;
2338 case COMPCODE_GT:
2339 return GT_EXPR;
2340 case COMPCODE_NE:
2341 return NE_EXPR;
2342 case COMPCODE_GE:
2343 return GE_EXPR;
2344 case COMPCODE_ORD:
2345 return ORDERED_EXPR;
2346 case COMPCODE_UNORD:
2347 return UNORDERED_EXPR;
2348 case COMPCODE_UNLT:
2349 return UNLT_EXPR;
2350 case COMPCODE_UNEQ:
2351 return UNEQ_EXPR;
2352 case COMPCODE_UNLE:
2353 return UNLE_EXPR;
2354 case COMPCODE_UNGT:
2355 return UNGT_EXPR;
2356 case COMPCODE_LTGT:
2357 return LTGT_EXPR;
2358 case COMPCODE_UNGE:
2359 return UNGE_EXPR;
2360 default:
2361 gcc_unreachable ();
2365 /* Return a tree for the comparison which is the combination of
2366 doing the AND or OR (depending on CODE) of the two operations LCODE
2367 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2368 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2369 if this makes the transformation invalid. */
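/* Illustrative sketch (not part of the original source): for integer
   operands, folding (x < y) || (x == y) computes
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and returns the single
   comparison x <= y, while (x < y) && (x > y) computes
   COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE and returns a constant
   false node. */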
2371 tree
2372 combine_comparisons (location_t loc,
2373 enum tree_code code, enum tree_code lcode,
2374 enum tree_code rcode, tree truth_type,
2375 tree ll_arg, tree lr_arg)
2377 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2378 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2379 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2380 int compcode;
2382 switch (code)
2384 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2385 compcode = lcompcode & rcompcode;
2386 break;
2388 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2389 compcode = lcompcode | rcompcode;
2390 break;
2392 default:
2393 return NULL_TREE;
2396 if (!honor_nans)
2398 /* Eliminate unordered comparisons, as well as LTGT and ORD
2399 which are not used unless the mode has NaNs. */
2400 compcode &= ~COMPCODE_UNORD;
2401 if (compcode == COMPCODE_LTGT)
2402 compcode = COMPCODE_NE;
2403 else if (compcode == COMPCODE_ORD)
2404 compcode = COMPCODE_TRUE;
2406 else if (flag_trapping_math)
2408 /* Check that the original operation and the optimized ones will trap
2409 under the same condition. */
2410 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2411 && (lcompcode != COMPCODE_EQ)
2412 && (lcompcode != COMPCODE_ORD);
2413 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2414 && (rcompcode != COMPCODE_EQ)
2415 && (rcompcode != COMPCODE_ORD);
2416 bool trap = (compcode & COMPCODE_UNORD) == 0
2417 && (compcode != COMPCODE_EQ)
2418 && (compcode != COMPCODE_ORD);
2420 /* In a short-circuited boolean expression the LHS might be
2421 such that the RHS, if evaluated, will never trap. For
2422 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2423 if neither x nor y is NaN. (This is a mixed blessing: for
2424 example, the expression above will never trap, hence
2425 optimizing it to x < y would be invalid). */
2426 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2427 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2428 rtrap = false;
2430 /* If the comparison was short-circuited, and only the RHS
2431 trapped, we may now generate a spurious trap. */
2432 if (rtrap && !ltrap
2433 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2434 return NULL_TREE;
2436 /* If we changed the conditions that cause a trap, we lose. */
2437 if ((ltrap || rtrap) != trap)
2438 return NULL_TREE;
2441 if (compcode == COMPCODE_TRUE)
2442 return constant_boolean_node (true, truth_type);
2443 else if (compcode == COMPCODE_FALSE)
2444 return constant_boolean_node (false, truth_type);
2445 else
2447 enum tree_code tcode;
2449 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2450 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2454 /* Return nonzero if two operands (typically of the same tree node)
2455 are necessarily equal. If either argument has side-effects this
2456 function returns zero. FLAGS modifies behavior as follows:
2458 If OEP_ONLY_CONST is set, only return nonzero for constants.
2459 This function tests whether the operands are indistinguishable;
2460 it does not test whether they are equal using C's == operation.
2461 The distinction is important for IEEE floating point, because
2462 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2463 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2465 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2466 even though it may hold multiple values during a function.
2467 This is because a GCC tree node guarantees that nothing else is
2468 executed between the evaluation of its "operands" (which may often
2469 be evaluated in arbitrary order). Hence if the operands themselves
2470 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2471 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2472 unset means assuming isochronic (or instantaneous) tree equivalence.
2473 Unless comparing arbitrary expression trees, such as from different
2474 statements, this flag can usually be left unset.
2476 If OEP_PURE_SAME is set, then pure functions with identical arguments
2477 are considered the same. It is used when the caller has other ways
2478 to ensure that global memory is unchanged in between. */
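/* Hedged example (not part of the original source): with FLAGS == 0,
   two occurrences of the same VAR_DECL compare equal, which is what
   lets "x - x" fold to 0. The REAL_CSTs -0.0 and 0.0 are considered
   equal only when the mode does not honor signed zeros; under IEEE
   semantics they are distinguishable here even though -0.0 == 0.0
   evaluates to true at run time. */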
2480 int
2481 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2483 /* If either is ERROR_MARK, they aren't equal. */
2484 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2485 || TREE_TYPE (arg0) == error_mark_node
2486 || TREE_TYPE (arg1) == error_mark_node)
2487 return 0;
2489 /* Similarly, if either does not have a type (like a released SSA name),
2490 they aren't equal. */
2491 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2492 return 0;
2494 /* Check equality of integer constants before bailing out due to
2495 precision differences. */
2496 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2497 return tree_int_cst_equal (arg0, arg1);
2499 /* If the two types don't have the same signedness, then we can't consider
2500 them equal. We must check this before the STRIP_NOPS calls
2501 because they may change the signedness of the arguments. As pointers
2502 strictly don't have a signedness, require either two pointers or
2503 two non-pointers as well. */
2504 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2505 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2506 return 0;
2508 /* We cannot consider pointers to different address spaces equal. */
2509 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2510 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2511 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2512 return 0;
2514 /* If the two types don't have the same precision, then it is not safe
2515 to strip NOPs. */
2516 if (element_precision (TREE_TYPE (arg0))
2517 != element_precision (TREE_TYPE (arg1)))
2518 return 0;
2520 STRIP_NOPS (arg0);
2521 STRIP_NOPS (arg1);
2523 /* In case both args are comparisons but with different comparison
2524 code, try to swap the comparison operands of one arg to produce
2525 a match and compare that variant. */
2526 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2527 && COMPARISON_CLASS_P (arg0)
2528 && COMPARISON_CLASS_P (arg1))
2530 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2532 if (TREE_CODE (arg0) == swap_code)
2533 return operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags);
2539 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2540 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2541 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2542 return 0;
2544 /* This is needed for conversions and for COMPONENT_REF.
2545 Might as well play it safe and always test this. */
2546 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2547 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2548 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2549 return 0;
2551 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2552 We don't care about side effects in that case because the SAVE_EXPR
2553 takes care of that for us. In all other cases, two expressions are
2554 equal if they have no side effects. If we have two identical
2555 expressions with side effects that should be treated the same due
2556 to the only side effects being identical SAVE_EXPR's, that will
2557 be detected in the recursive calls below.
2558 If we are taking an invariant address of two identical objects
2559 they are necessarily equal as well. */
2560 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2561 && (TREE_CODE (arg0) == SAVE_EXPR
2562 || (flags & OEP_CONSTANT_ADDRESS_OF)
2563 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2564 return 1;
2566 /* Next handle constant cases, those for which we can return 1 even
2567 if ONLY_CONST is set. */
2568 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2569 switch (TREE_CODE (arg0))
2571 case INTEGER_CST:
2572 return tree_int_cst_equal (arg0, arg1);
2574 case FIXED_CST:
2575 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2576 TREE_FIXED_CST (arg1));
2578 case REAL_CST:
2579 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2580 TREE_REAL_CST (arg1)))
2581 return 1;
2584 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2586 /* If we do not distinguish between signed and unsigned zero,
2587 consider them equal. */
2588 if (real_zerop (arg0) && real_zerop (arg1))
2589 return 1;
2591 return 0;
2593 case VECTOR_CST:
2595 unsigned i;
2597 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2598 return 0;
2600 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2602 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2603 VECTOR_CST_ELT (arg1, i), flags))
2604 return 0;
2606 return 1;
2609 case COMPLEX_CST:
2610 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2611 flags)
2612 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2613 flags));
2615 case STRING_CST:
2616 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2617 && ! memcmp (TREE_STRING_POINTER (arg0),
2618 TREE_STRING_POINTER (arg1),
2619 TREE_STRING_LENGTH (arg0)));
2621 case ADDR_EXPR:
2622 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2623 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2624 ? OEP_CONSTANT_ADDRESS_OF : 0);
2625 default:
2626 break;
2629 if (flags & OEP_ONLY_CONST)
2630 return 0;
2632 /* Define macros to test an operand from arg0 and arg1 for equality and a
2633 variant that allows null and views null as being different from any
2634 non-null value. In the latter case, if either is null, then both
2635 must be; otherwise, do the normal comparison. */
2636 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2637 TREE_OPERAND (arg1, N), flags)
2639 #define OP_SAME_WITH_NULL(N) \
2640 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2641 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2643 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2645 case tcc_unary:
2646 /* Two conversions are equal only if signedness and modes match. */
2647 switch (TREE_CODE (arg0))
2649 CASE_CONVERT:
2650 case FIX_TRUNC_EXPR:
2651 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2652 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2653 return 0;
2654 break;
2655 default:
2656 break;
2659 return OP_SAME (0);
2662 case tcc_comparison:
2663 case tcc_binary:
2664 if (OP_SAME (0) && OP_SAME (1))
2665 return 1;
2667 /* For commutative ops, allow the other order. */
2668 return (commutative_tree_code (TREE_CODE (arg0))
2669 && operand_equal_p (TREE_OPERAND (arg0, 0),
2670 TREE_OPERAND (arg1, 1), flags)
2671 && operand_equal_p (TREE_OPERAND (arg0, 1),
2672 TREE_OPERAND (arg1, 0), flags));
2674 case tcc_reference:
2675 /* If either of the pointer (or reference) expressions we are
2676 dereferencing contains a side effect, these cannot be equal,
2677 but their addresses can be. */
2678 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2679 && (TREE_SIDE_EFFECTS (arg0)
2680 || TREE_SIDE_EFFECTS (arg1)))
2681 return 0;
2683 switch (TREE_CODE (arg0))
2685 case INDIRECT_REF:
2686 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2687 return OP_SAME (0);
2689 case REALPART_EXPR:
2690 case IMAGPART_EXPR:
2691 return OP_SAME (0);
2693 case TARGET_MEM_REF:
2694 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2695 /* Require equal extra operands and then fall through to MEM_REF
2696 handling of the two common operands. */
2697 if (!OP_SAME_WITH_NULL (2)
2698 || !OP_SAME_WITH_NULL (3)
2699 || !OP_SAME_WITH_NULL (4))
2700 return 0;
2701 /* Fallthru. */
2702 case MEM_REF:
2703 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2704 /* Require equal access sizes, and similar pointer types.
2705 We can have incomplete types for array references of
2706 variable-sized arrays from the Fortran frontend
2707 though. Also verify the types are compatible. */
2708 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2709 || (TYPE_SIZE (TREE_TYPE (arg0))
2710 && TYPE_SIZE (TREE_TYPE (arg1))
2711 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2712 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2713 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2714 && alias_ptr_types_compatible_p
2715 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2716 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2717 && OP_SAME (0) && OP_SAME (1));
2719 case ARRAY_REF:
2720 case ARRAY_RANGE_REF:
2721 /* Operands 2 and 3 may be null.
2722 Compare the array index by value first if it is constant, as we
2723 may have different types but the same value here. */
2724 if (!OP_SAME (0))
2725 return 0;
2726 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2727 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2728 TREE_OPERAND (arg1, 1))
2729 || OP_SAME (1))
2730 && OP_SAME_WITH_NULL (2)
2731 && OP_SAME_WITH_NULL (3));
2733 case COMPONENT_REF:
2734 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2735 may be NULL when we're called to compare MEM_EXPRs. */
2736 if (!OP_SAME_WITH_NULL (0)
2737 || !OP_SAME (1))
2738 return 0;
2739 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2740 return OP_SAME_WITH_NULL (2);
2742 case BIT_FIELD_REF:
2743 if (!OP_SAME (0))
2744 return 0;
2745 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2746 return OP_SAME (1) && OP_SAME (2);
2748 default:
2749 return 0;
2752 case tcc_expression:
2753 switch (TREE_CODE (arg0))
2755 case ADDR_EXPR:
2756 case TRUTH_NOT_EXPR:
2757 return OP_SAME (0);
2759 case TRUTH_ANDIF_EXPR:
2760 case TRUTH_ORIF_EXPR:
2761 return OP_SAME (0) && OP_SAME (1);
2763 case FMA_EXPR:
2764 case WIDEN_MULT_PLUS_EXPR:
2765 case WIDEN_MULT_MINUS_EXPR:
2766 if (!OP_SAME (2))
2767 return 0;
2768 /* The multiplication operands are commutative. */
2769 /* FALLTHRU */
2771 case TRUTH_AND_EXPR:
2772 case TRUTH_OR_EXPR:
2773 case TRUTH_XOR_EXPR:
2774 if (OP_SAME (0) && OP_SAME (1))
2775 return 1;
2777 /* Otherwise take into account that this is a commutative operation. */
2778 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2779 TREE_OPERAND (arg1, 1), flags)
2780 && operand_equal_p (TREE_OPERAND (arg0, 1),
2781 TREE_OPERAND (arg1, 0), flags));
2783 case COND_EXPR:
2784 case VEC_COND_EXPR:
2785 case DOT_PROD_EXPR:
2786 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2788 default:
2789 return 0;
2792 case tcc_vl_exp:
2793 switch (TREE_CODE (arg0))
2795 case CALL_EXPR:
2796 /* If the CALL_EXPRs call different functions, then they
2797 clearly cannot be equal. */
2798 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2799 flags))
2800 return 0;
2803 unsigned int cef = call_expr_flags (arg0);
2804 if (flags & OEP_PURE_SAME)
2805 cef &= ECF_CONST | ECF_PURE;
2806 else
2807 cef &= ECF_CONST;
2808 if (!cef)
2809 return 0;
2812 /* Now see if all the arguments are the same. */
2814 const_call_expr_arg_iterator iter0, iter1;
2815 const_tree a0, a1;
2816 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2817 a1 = first_const_call_expr_arg (arg1, &iter1);
2818 a0 && a1;
2819 a0 = next_const_call_expr_arg (&iter0),
2820 a1 = next_const_call_expr_arg (&iter1))
2821 if (! operand_equal_p (a0, a1, flags))
2822 return 0;
2824 /* If we get here and both argument lists are exhausted
2825 then the CALL_EXPRs are equal. */
2826 return ! (a0 || a1);
2828 default:
2829 return 0;
2832 case tcc_declaration:
2833 /* Consider __builtin_sqrt equal to sqrt. */
2834 return (TREE_CODE (arg0) == FUNCTION_DECL
2835 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2836 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2837 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2839 default:
2840 return 0;
2843 #undef OP_SAME
2844 #undef OP_SAME_WITH_NULL
2847 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2848 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2850 When in doubt, return 0. */
2852 static int
2853 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2855 int unsignedp1, unsignedpo;
2856 tree primarg0, primarg1, primother;
2857 unsigned int correct_width;
2859 if (operand_equal_p (arg0, arg1, 0))
2860 return 1;
2862 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2863 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2864 return 0;
2866 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2867 and see if the inner values are the same. This removes any
2868 signedness comparison, which doesn't matter here. */
2869 primarg0 = arg0, primarg1 = arg1;
2870 STRIP_NOPS (primarg0);
2871 STRIP_NOPS (primarg1);
2872 if (operand_equal_p (primarg0, primarg1, 0))
2873 return 1;
2875 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2876 actual comparison operand, ARG0.
2878 First throw away any conversions to wider types
2879 already present in the operands. */
2881 primarg1 = get_narrower (arg1, &unsignedp1);
2882 primother = get_narrower (other, &unsignedpo);
2884 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2885 if (unsignedp1 == unsignedpo
2886 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2887 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2889 tree type = TREE_TYPE (arg0);
2891 /* Make sure the shorter operand is extended the right way
2892 to match the longer operand. */
2893 primarg1 = fold_convert (signed_or_unsigned_type_for
2894 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2896 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2897 return 1;
2900 return 0;
2903 /* See if ARG is an expression that is either a comparison or is performing
2904 arithmetic on comparisons. The comparisons must only be comparing
2905 two different values, which will be stored in *CVAL1 and *CVAL2; if
2906 they are nonzero it means that some operands have already been found.
2907 No variables may be used anywhere else in the expression except in the
2908 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2909 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2911 If this is true, return 1. Otherwise, return zero. */
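/* Example (not part of the original source): for
   ARG = (a < b) && (a == b), the walk records *CVAL1 = a and
   *CVAL2 = b from the first comparison, finds that the second one
   uses the same pair, and returns 1. For (a < b) && (a == c), the
   operand c matches neither saved value, so the result is 0. */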
2913 static int
2914 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2916 enum tree_code code = TREE_CODE (arg);
2917 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2919 /* We can handle some of the tcc_expression cases here. */
2920 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2921 tclass = tcc_unary;
2922 else if (tclass == tcc_expression
2923 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2924 || code == COMPOUND_EXPR))
2925 tclass = tcc_binary;
2927 else if (tclass == tcc_expression && code == SAVE_EXPR
2928 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2930 /* If we've already found a CVAL1 or CVAL2, this expression is
2931 too complex to handle. */
2932 if (*cval1 || *cval2)
2933 return 0;
2935 tclass = tcc_unary;
2936 *save_p = 1;
2939 switch (tclass)
2941 case tcc_unary:
2942 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2944 case tcc_binary:
2945 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2946 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2947 cval1, cval2, save_p));
2949 case tcc_constant:
2950 return 1;
2952 case tcc_expression:
2953 if (code == COND_EXPR)
2954 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2955 cval1, cval2, save_p)
2956 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2957 cval1, cval2, save_p)
2958 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2959 cval1, cval2, save_p));
2960 return 0;
2962 case tcc_comparison:
2963 /* First see if we can handle the first operand, then the second. For
2964 the second operand, we know *CVAL1 can't be zero. It must be that
2965 one side of the comparison is each of the values; test for the
2966 case where this isn't true by failing if the two operands
2967 are the same. */
2969 if (operand_equal_p (TREE_OPERAND (arg, 0),
2970 TREE_OPERAND (arg, 1), 0))
2971 return 0;
2973 if (*cval1 == 0)
2974 *cval1 = TREE_OPERAND (arg, 0);
2975 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2977 else if (*cval2 == 0)
2978 *cval2 = TREE_OPERAND (arg, 0);
2979 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2981 else
2982 return 0;
2984 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2986 else if (*cval2 == 0)
2987 *cval2 = TREE_OPERAND (arg, 1);
2988 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2990 else
2991 return 0;
2993 return 1;
2995 default:
2996 return 0;
3000 /* ARG is a tree that is known to contain just arithmetic operations and
3001 comparisons. Evaluate the operations in the tree substituting NEW0 for
3002 any occurrence of OLD0 as an operand of a comparison and likewise for
3003 NEW1 and OLD1. */
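/* Illustration (not part of the original source): with
   ARG = (x < y) || (x == y), OLD0 = x, NEW0 = 0, OLD1 = y and
   NEW1 = 1, eval_subst rebuilds the tree as (0 < 1) || (0 == 1),
   which subsequent folding collapses to a constant. This is roughly
   how fold evaluates a two-valued comparison under each assignment
   of its operands. */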
3005 static tree
3006 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3007 tree old1, tree new1)
3009 tree type = TREE_TYPE (arg);
3010 enum tree_code code = TREE_CODE (arg);
3011 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3013 /* We can handle some of the tcc_expression cases here. */
3014 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3015 tclass = tcc_unary;
3016 else if (tclass == tcc_expression
3017 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3018 tclass = tcc_binary;
3020 switch (tclass)
3022 case tcc_unary:
3023 return fold_build1_loc (loc, code, type,
3024 eval_subst (loc, TREE_OPERAND (arg, 0),
3025 old0, new0, old1, new1));
3027 case tcc_binary:
3028 return fold_build2_loc (loc, code, type,
3029 eval_subst (loc, TREE_OPERAND (arg, 0),
3030 old0, new0, old1, new1),
3031 eval_subst (loc, TREE_OPERAND (arg, 1),
3032 old0, new0, old1, new1));
3034 case tcc_expression:
3035 switch (code)
3037 case SAVE_EXPR:
3038 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3039 old1, new1);
3041 case COMPOUND_EXPR:
3042 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3043 old1, new1);
3045 case COND_EXPR:
3046 return fold_build3_loc (loc, code, type,
3047 eval_subst (loc, TREE_OPERAND (arg, 0),
3048 old0, new0, old1, new1),
3049 eval_subst (loc, TREE_OPERAND (arg, 1),
3050 old0, new0, old1, new1),
3051 eval_subst (loc, TREE_OPERAND (arg, 2),
3052 old0, new0, old1, new1));
3053 default:
3054 break;
3056 /* Fall through - ??? */
3058 case tcc_comparison:
3060 tree arg0 = TREE_OPERAND (arg, 0);
3061 tree arg1 = TREE_OPERAND (arg, 1);
3063 /* We need to check both for exact equality and tree equality. The
3064 former will be true if the operand has a side-effect. In that
3065 case, we know the operand occurred exactly once. */
3067 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3068 arg0 = new0;
3069 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3070 arg0 = new1;
3072 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3073 arg1 = new0;
3074 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3075 arg1 = new1;
3077 return fold_build2_loc (loc, code, type, arg0, arg1);
3080 default:
3081 return arg;
3085 /* Return a tree for the case when the result of an expression is RESULT
3086 converted to TYPE and OMITTED was previously an operand of the expression
3087 but is now not needed (e.g., we folded OMITTED * 0).
3089 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3090 the conversion of RESULT to TYPE. */
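/* Sketch of a typical use (not part of the original source): folding
   "f () * 0" cannot simply return 0 because the call has side
   effects, so the folder calls
     omit_one_operand_loc (loc, type, integer_zero_node, call)
   and gets back the COMPOUND_EXPR "(f (), 0)", which still evaluates
   the call but yields the constant. */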
3092 tree
3093 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3095 tree t = fold_convert_loc (loc, type, result);
3097 /* If the resulting operand is an empty statement, just return the omitted
3098 statement cast to void. */
3099 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3100 return build1_loc (loc, NOP_EXPR, void_type_node,
3101 fold_ignored_result (omitted));
3103 if (TREE_SIDE_EFFECTS (omitted))
3104 return build2_loc (loc, COMPOUND_EXPR, type,
3105 fold_ignored_result (omitted), t);
3107 return non_lvalue_loc (loc, t);
3110 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3112 static tree
3113 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3114 tree omitted)
3116 tree t = fold_convert_loc (loc, type, result);
3118 /* If the resulting operand is an empty statement, just return the omitted
3119 statement cast to void. */
3120 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3121 return build1_loc (loc, NOP_EXPR, void_type_node,
3122 fold_ignored_result (omitted));
3124 if (TREE_SIDE_EFFECTS (omitted))
3125 return build2_loc (loc, COMPOUND_EXPR, type,
3126 fold_ignored_result (omitted), t);
3128 return pedantic_non_lvalue_loc (loc, t);
3131 /* Return a tree for the case when the result of an expression is RESULT
3132 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3133 of the expression but are now not needed.
3135 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3136 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3137 evaluated before OMITTED2. Otherwise, if neither has side effects,
3138 just do the conversion of RESULT to TYPE. */
3140 tree
3141 omit_two_operands_loc (location_t loc, tree type, tree result,
3142 tree omitted1, tree omitted2)
3144 tree t = fold_convert_loc (loc, type, result);
3146 if (TREE_SIDE_EFFECTS (omitted2))
3147 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3148 if (TREE_SIDE_EFFECTS (omitted1))
3149 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3151 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3155 /* Return a simplified tree node for the truth-negation of ARG. This
3156 never alters ARG itself. We assume that ARG is an operation that
3157 returns a truth value (0 or 1).
3159 FIXME: one would think we would fold the result, but it causes
3160 problems with the dominator optimizer. */
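/* Example (not part of the original source): negating a && b
   produces !a || !b via the TRUTH_AND_EXPR case below, with each
   operand inverted recursively; negating a floating-point x < y
   under -ftrapping-math returns NULL_TREE instead, because the
   inverted comparison could change which operands trap. */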
3162 static tree
3163 fold_truth_not_expr (location_t loc, tree arg)
3165 tree type = TREE_TYPE (arg);
3166 enum tree_code code = TREE_CODE (arg);
3167 location_t loc1, loc2;
3169 /* If this is a comparison, we can simply invert it, except for
3170 floating-point non-equality comparisons, in which case we just
3171 enclose a TRUTH_NOT_EXPR around what we have. */
3173 if (TREE_CODE_CLASS (code) == tcc_comparison)
3175 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3176 if (FLOAT_TYPE_P (op_type)
3177 && flag_trapping_math
3178 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3179 && code != NE_EXPR && code != EQ_EXPR)
3180 return NULL_TREE;
3182 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3183 if (code == ERROR_MARK)
3184 return NULL_TREE;
3186 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3187 TREE_OPERAND (arg, 1));
3190 switch (code)
3192 case INTEGER_CST:
3193 return constant_boolean_node (integer_zerop (arg), type);
3195 case TRUTH_AND_EXPR:
3196 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3197 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3198 return build2_loc (loc, TRUTH_OR_EXPR, type,
3199 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3200 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3202 case TRUTH_OR_EXPR:
3203 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3204 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3205 return build2_loc (loc, TRUTH_AND_EXPR, type,
3206 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3207 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3209 case TRUTH_XOR_EXPR:
3210 /* Here we can invert either operand. We invert the first operand
3211 unless the second operand is a TRUTH_NOT_EXPR in which case our
3212 result is the XOR of the first operand with the inside of the
3213 negation of the second operand. */
3215 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3216 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3217 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3218 else
3219 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3220 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3221 TREE_OPERAND (arg, 1));
3223 case TRUTH_ANDIF_EXPR:
3224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3226 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3230 case TRUTH_ORIF_EXPR:
3231 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3232 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3233 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3234 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3235 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3237 case TRUTH_NOT_EXPR:
3238 return TREE_OPERAND (arg, 0);
3240 case COND_EXPR:
3242 tree arg1 = TREE_OPERAND (arg, 1);
3243 tree arg2 = TREE_OPERAND (arg, 2);
3245 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3246 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3248 /* A COND_EXPR may have a throw as one operand, which
3249 then has void type. Just leave void operands
3250 as they are. */
3251 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3252 VOID_TYPE_P (TREE_TYPE (arg1))
3253 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3254 VOID_TYPE_P (TREE_TYPE (arg2))
3255 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3258 case COMPOUND_EXPR:
3259 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3260 return build2_loc (loc, COMPOUND_EXPR, type,
3261 TREE_OPERAND (arg, 0),
3262 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3264 case NON_LVALUE_EXPR:
3265 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3266 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3268 CASE_CONVERT:
3269 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3270 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3272 /* ... fall through ... */
3274 case FLOAT_EXPR:
3275 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3276 return build1_loc (loc, TREE_CODE (arg), type,
3277 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3279 case BIT_AND_EXPR:
3280 if (!integer_onep (TREE_OPERAND (arg, 1)))
3281 return NULL_TREE;
3282 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3284 case SAVE_EXPR:
3285 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3287 case CLEANUP_POINT_EXPR:
3288 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3289 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3290 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3292 default:
3293 return NULL_TREE;
3297 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3298 assume that ARG is an operation that returns a truth value (0 or 1
3299 for scalars, 0 or -1 for vectors). Return the folded expression if
3300 folding is successful. Otherwise, return NULL_TREE. */
3302 static tree
3303 fold_invert_truthvalue (location_t loc, tree arg)
3305 tree type = TREE_TYPE (arg);
3306 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3307 ? BIT_NOT_EXPR
3308 : TRUTH_NOT_EXPR,
3309 type, arg);
3312 /* Return a simplified tree node for the truth-negation of ARG. This
3313 never alters ARG itself. We assume that ARG is an operation that
3314 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3316 tree
3317 invert_truthvalue_loc (location_t loc, tree arg)
3319 if (TREE_CODE (arg) == ERROR_MARK)
3320 return arg;
3322 tree type = TREE_TYPE (arg);
3323 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3324 ? BIT_NOT_EXPR
3325 : TRUTH_NOT_EXPR,
3326 type, arg);
3329 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3330 operands are another bit-wise operation with a common input. If so,
3331 distribute the bit operations to save an operation and possibly two if
3332 constants are involved. For example, convert
3333 (A | B) & (A | C) into A | (B & C)
3334 Further simplification will occur if B and C are constants.
3336 If this optimization cannot be done, 0 will be returned. */
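/* Worked example (not part of the original source): in
   (a | 0x0f) & (a | 0xf0) the common operand is a, so the expression
   becomes a | (0x0f & 0xf0) == a | 0, which further folding reduces
   to plain a -- three bit operations become none. */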
3338 static tree
3339 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3340 tree arg0, tree arg1)
3342 tree common;
3343 tree left, right;
3345 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3346 || TREE_CODE (arg0) == code
3347 || (TREE_CODE (arg0) != BIT_AND_EXPR
3348 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3349 return 0;
3351 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3353 common = TREE_OPERAND (arg0, 0);
3354 left = TREE_OPERAND (arg0, 1);
3355 right = TREE_OPERAND (arg1, 1);
3357 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3359 common = TREE_OPERAND (arg0, 0);
3360 left = TREE_OPERAND (arg0, 1);
3361 right = TREE_OPERAND (arg1, 0);
3363 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3365 common = TREE_OPERAND (arg0, 1);
3366 left = TREE_OPERAND (arg0, 0);
3367 right = TREE_OPERAND (arg1, 1);
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3371 common = TREE_OPERAND (arg0, 1);
3372 left = TREE_OPERAND (arg0, 0);
3373 right = TREE_OPERAND (arg1, 0);
3375 else
3376 return 0;
3378 common = fold_convert_loc (loc, type, common);
3379 left = fold_convert_loc (loc, type, left);
3380 right = fold_convert_loc (loc, type, right);
3381 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3382 fold_build2_loc (loc, code, type, left, right));
3385 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3386 with code CODE. This optimization is unsafe. */
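/* Illustration (not part of the original source): under unsafe math
   optimizations, x/3.0 + y/3.0 becomes (x + y)/3.0, and
   x/2.0 - x/4.0 becomes x * (0.5 - 0.25) == x * 0.25. Both rewrites
   can change rounding and intermediate overflow, which is why the
   comment above calls the optimization unsafe. */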
3387 static tree
3388 distribute_real_division (location_t loc, enum tree_code code, tree type,
3389 tree arg0, tree arg1)
3391 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3392 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3394 /* (A / C) +- (B / C) -> (A +- B) / C. */
3395 if (mul0 == mul1
3396 && operand_equal_p (TREE_OPERAND (arg0, 1),
3397 TREE_OPERAND (arg1, 1), 0))
3398 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3399 fold_build2_loc (loc, code, type,
3400 TREE_OPERAND (arg0, 0),
3401 TREE_OPERAND (arg1, 0)),
3402 TREE_OPERAND (arg0, 1));
3404 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3405 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3406 TREE_OPERAND (arg1, 0), 0)
3407 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3408 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3410 REAL_VALUE_TYPE r0, r1;
3411 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3412 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3413 if (!mul0)
3414 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3415 if (!mul1)
3416 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3417 real_arithmetic (&r0, code, &r0, &r1);
3418 return fold_build2_loc (loc, MULT_EXPR, type,
3419 TREE_OPERAND (arg0, 0),
3420 build_real (type, r0));
3423 return NULL_TREE;
3426 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3427 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3429 static tree
3430 make_bit_field_ref (location_t loc, tree inner, tree type,
3431 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3433 tree result, bftype;
3435 if (bitpos == 0)
3437 tree size = TYPE_SIZE (TREE_TYPE (inner));
3438 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3439 || POINTER_TYPE_P (TREE_TYPE (inner)))
3440 && tree_fits_shwi_p (size)
3441 && tree_to_shwi (size) == bitsize)
3442 return fold_convert_loc (loc, type, inner);
3445 bftype = type;
3446 if (TYPE_PRECISION (bftype) != bitsize
3447 || TYPE_UNSIGNED (bftype) == !unsignedp)
3448 bftype = build_nonstandard_integer_type (bitsize, 0);
3450 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3451 size_int (bitsize), bitsize_int (bitpos));
3453 if (bftype != type)
3454 result = fold_convert_loc (loc, type, result);
3456 return result;
3459 /* Optimize a bit-field compare.
3461 There are two cases: First is a compare against a constant and the
3462 second is a comparison of two items where the fields are at the same
3463 bit position relative to the start of a chunk (byte, halfword, word)
3464 large enough to contain it. In these cases we can avoid the shift
3465 implicit in bitfield extractions.
3467 For constants, we emit a compare of the shifted constant with the
3468 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3469 compared. For two fields at the same position, we do the ANDs with the
3470 similar mask and compare the result of the ANDs.
3472 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3473 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3474 are the left and right operands of the comparison, respectively.
3476 If the optimization described above can be done, we return the resulting
3477 tree. Otherwise we return zero. */
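/* Hedged sketch (not part of the original source): given
     struct s { unsigned f : 3; } v;
   the test v.f == 5 would normally extract and shift the field.
   This routine instead loads an enclosing unit w, masks it, and
   compares against the constant shifted into place, roughly
     (w & (7 << pos)) == (5 << pos)
   where pos is the field's bit position, so the loaded value itself
   never needs shifting. */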
3479 static tree
3480 optimize_bit_field_compare (location_t loc, enum tree_code code,
3481 tree compare_type, tree lhs, tree rhs)
3483 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3484 tree type = TREE_TYPE (lhs);
3485 tree signed_type, unsigned_type;
3486 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3487 enum machine_mode lmode, rmode, nmode;
3488 int lunsignedp, runsignedp;
3489 int lvolatilep = 0, rvolatilep = 0;
3490 tree linner, rinner = NULL_TREE;
3491 tree mask;
3492 tree offset;
3494 /* Get all the information about the extractions being done. If the bit size
3495 is the same as the size of the underlying object, we aren't doing an
3496 extraction at all and so can do nothing. We also don't want to
3497 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3498 then will no longer be able to replace it. */
3499 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3500 &lunsignedp, &lvolatilep, false);
3501 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3502 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3503 return 0;
3505 if (!const_p)
3507 /* If this is not a constant, we can only do something if bit positions,
3508 sizes, and signedness are the same. */
3509 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3510 &runsignedp, &rvolatilep, false);
3512 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3513 || lunsignedp != runsignedp || offset != 0
3514 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3515 return 0;
3518 /* See if we can find a mode to refer to this field. We should be able to,
3519 but fail if we can't. */
3520 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3521 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3522 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3523 TYPE_ALIGN (TREE_TYPE (rinner))),
3524 word_mode, false);
3525 if (nmode == VOIDmode)
3526 return 0;
3528 /* Set signed and unsigned types of the precision of this mode for the
3529 shifts below. */
3530 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3531 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3533 /* Compute the bit position and size for the new reference and our offset
3534 within it. If the new reference is the same size as the original, we
3535 won't optimize anything, so return zero. */
3536 nbitsize = GET_MODE_BITSIZE (nmode);
3537 nbitpos = lbitpos & ~ (nbitsize - 1);
3538 lbitpos -= nbitpos;
3539 if (nbitsize == lbitsize)
3540 return 0;
3542 if (BYTES_BIG_ENDIAN)
3543 lbitpos = nbitsize - lbitsize - lbitpos;
3545 /* Make the mask to be used against the extracted field. */
3546 mask = build_int_cst_type (unsigned_type, -1);
3547 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3548 mask = const_binop (RSHIFT_EXPR, mask,
3549 size_int (nbitsize - lbitsize - lbitpos));
3551 if (! const_p)
3552 /* If not comparing with constant, just rework the comparison
3553 and return. */
3554 return fold_build2_loc (loc, code, compare_type,
3555 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3556 make_bit_field_ref (loc, linner,
3557 unsigned_type,
3558 nbitsize, nbitpos,
3560 mask),
3561 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3562 make_bit_field_ref (loc, rinner,
3563 unsigned_type,
3564 nbitsize, nbitpos,
3566 mask));
3568 /* Otherwise, we are handling the constant case. See if the constant is too
3569 big for the field. Warn and return a tree for 0 (false) if so. We do
3570 this not only for its own sake, but to avoid having to test for this
3571 error case below. If we didn't, we might generate wrong code.
3573 For unsigned fields, the constant shifted right by the field length should
3574 be all zero. For signed fields, the high-order bits should agree with
3575 the sign bit. */
3577 if (lunsignedp)
3579 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580 fold_convert_loc (loc,
3581 unsigned_type, rhs),
3582 size_int (lbitsize))))
3584 warning (0, "comparison is always %d due to width of bit-field",
3585 code == NE_EXPR);
3586 return constant_boolean_node (code == NE_EXPR, compare_type);
3589 else
3591 tree tem = const_binop (RSHIFT_EXPR,
3592 fold_convert_loc (loc, signed_type, rhs),
3593 size_int (lbitsize - 1));
3594 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3596 warning (0, "comparison is always %d due to width of bit-field",
3597 code == NE_EXPR);
3598 return constant_boolean_node (code == NE_EXPR, compare_type);
3602 /* Single-bit compares should always be against zero. */
3603 if (lbitsize == 1 && ! integer_zerop (rhs))
3605 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3606 rhs = build_int_cst (type, 0);
3609 /* Make a new bitfield reference, shift the constant over the
3610 appropriate number of bits and mask it with the computed mask
3611 (in case this was a signed field). If we changed it, make a new one. */
3612 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3614 rhs = const_binop (BIT_AND_EXPR,
3615 const_binop (LSHIFT_EXPR,
3616 fold_convert_loc (loc, unsigned_type, rhs),
3617 size_int (lbitpos)),
3618 mask);
3620 lhs = build2_loc (loc, code, compare_type,
3621 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3622 return lhs;
3625 /* Subroutine for fold_truth_andor_1: decode a field reference.
3627 If EXP is a comparison reference, we return the innermost reference.
3629 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3630 set to the starting bit number.
3632 If the innermost field can be completely contained in a mode-sized
3633 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3635 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3636 otherwise it is not changed.
3638 *PUNSIGNEDP is set to the signedness of the field.
3640 *PMASK is set to the mask used. This is either contained in a
3641 BIT_AND_EXPR or derived from the width of the field.
3643 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3645 Return 0 if this is not a component reference or is one that we can't
3646 do anything with. */
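/* Example (not part of the original source): for an operand such as
   (unsigned char) (s.flags & 0x10), the decoder strips the
   conversion, records the 0x10 mask of the BIT_AND_EXPR in
   *PAND_MASK, finds the underlying COMPONENT_REF, and reports its
   bit size and position so fold_truth_andor_1 can merge it with a
   neighboring field test into a single masked compare. */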
3648 static tree
3649 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3650 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3651 int *punsignedp, int *pvolatilep,
3652 tree *pmask, tree *pand_mask)
3654 tree outer_type = 0;
3655 tree and_mask = 0;
3656 tree mask, inner, offset;
3657 tree unsigned_type;
3658 unsigned int precision;
3660 /* All the optimizations using this function assume integer fields.
3661 There are problems with FP fields since the type_for_size call
3662 below can fail for, e.g., XFmode. */
3663 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3664 return 0;
3666 /* We are interested in the bare arrangement of bits, so strip everything
3667 that doesn't affect the machine mode. However, record the type of the
3668 outermost expression if it may matter below. */
3669 if (CONVERT_EXPR_P (exp)
3670 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3671 outer_type = TREE_TYPE (exp);
3672 STRIP_NOPS (exp);
3674 if (TREE_CODE (exp) == BIT_AND_EXPR)
3676 and_mask = TREE_OPERAND (exp, 1);
3677 exp = TREE_OPERAND (exp, 0);
3678 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3679 if (TREE_CODE (and_mask) != INTEGER_CST)
3680 return 0;
3683 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3684 punsignedp, pvolatilep, false);
3685 if ((inner == exp && and_mask == 0)
3686 || *pbitsize < 0 || offset != 0
3687 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3688 return 0;
3690 /* If the number of bits in the reference is the same as the bitsize of
3691 the outer type, then the outer type gives the signedness. Otherwise
3692 (in case of a small bitfield) the signedness is unchanged. */
3693 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3694 *punsignedp = TYPE_UNSIGNED (outer_type);
3696 /* Compute the mask to access the bitfield. */
3697 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3698 precision = TYPE_PRECISION (unsigned_type);
3700 mask = build_int_cst_type (unsigned_type, -1);
3702 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3703 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3705 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3706 if (and_mask != 0)
3707 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3708 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3710 *pmask = mask;
3711 *pand_mask = and_mask;
3712 return inner;
3715 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3716 bit positions. */
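/* Informal note (not part of the original source): the masks tested
   here typically come from decode_field_reference; when a mask
   covers all SIZE bits of a field, the BIT_AND_EXPR it was derived
   from is redundant and fold_truth_andor_1 can compare the field
   directly. A mask with a cleared bit inside the field fails the
   test. */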
3718 static int
3719 all_ones_mask_p (const_tree mask, int size)
3721 tree type = TREE_TYPE (mask);
3722 unsigned int precision = TYPE_PRECISION (type);
3723 tree tmask;
3725 tmask = build_int_cst_type (signed_type_for (type), -1);
3727 return
3728 tree_int_cst_equal (mask,
3729 const_binop (RSHIFT_EXPR,
3730 const_binop (LSHIFT_EXPR, tmask,
3731 size_int (precision - size)),
3732 size_int (precision - size)));
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

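/* Concretely: for a 32-bit int EXP, the sign-bit constant is 0x80000000,
   so sign_bit_p (exp, val) returns EXP when VAL is 0x80000000; the LO/HI
   word splitting above only comes into play once the precision exceeds
   HOST_BITS_PER_WIDE_INT.  */
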
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

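/* Illustration: given `static int s; extern int g; int x;' (with `x' a
   local, non-volatile, non-addressable decl), the tests above accept `x'
   and the constant `42' as candidates for unconditional evaluation, but
   reject `g' (DECL_EXTERNAL) and `s' (TREE_STATIC without
   DECL_REGISTER).  */
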
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}

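/* For example, `(long) x == y' is simple here when `x' and `y' pass
   simple_operand_p, and so is `!(x < y)' via the TRUTH_NOT_EXPR
   recursion, provided nothing in the expression can trap or has side
   effects.  */
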
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

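/* Notation examples: "+ [2, 5]" is 2 <= X && X <= 5, "- [2, 5]" is its
   inverse, and make_range below seeds its loop with "- [0, 0]", i.e.
   X != 0, before refining the range step by step.  */
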
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

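/* For instance, comparing a present bound (SGN == 0) with an omitted
   upper bound (SGN == 1) under LT_EXPR yields 0 < 1, i.e. true: every
   finite value is below the missing upper bound, which is exactly the
   "value Z" convention described above.  */
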
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

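      /* A worked instance of the wrap-around normalization above, assuming
	 an 8-bit unsigned ARG0_TYPE: for X + 10 in + [5, 250], the new
	 bounds are n_low = 5 - 10 = 251 and n_high = 250 - 10 = 240
	 (mod 256).  Since 240 < 251 the range has wrapped, so it is
	 rewritten as its complement - [241, 250], which is the correct
	 set [251, 255] U [0, 240].  */
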
    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

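/* End-to-end sketch: for `(unsigned char) X + 10 <= 5', the LE_EXPR step
   gives + [-, 5]; the unsignedness adjustment in make_range_step pins the
   missing low bound, giving + [0, 5]; and the PLUS_EXPR step shifts it to
   + [246, 251] on X, all without special-casing the omitted bound.  */
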
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}

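/* A worked example, assuming 32-bit unsigned arithmetic: the range
   + [2, 5] on X reaches the final MINUS_EXPR path with
   value = high - low = 3, producing

       (unsigned) (X - 2) <= 3

   which is the single-comparison form promised in the overview comment
   before range_binop.  */
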
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}

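/* Two worked merges: including both, + [2, 5] and + [4, 10] overlap
   without subsumption, giving + [4, 5] (start of the second to the end
   of the first); excluding both, - [0, 4] and - [5, 9] are adjacent
   (range_successor (4) == 5), giving - [0, 9].  */
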
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}

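/* Two instances of the transformations above: `x > 0 ? x : -x' becomes
   ABS_EXPR <x>, and `x <= y ? x : y' becomes MIN_EXPR <x, y> when NaNs
   need not be honored; with constant operands, `x < 4 ? x : 3' matches
   the LT_EXPR case (C1 == C2 + 1) and becomes MIN_EXPR <x, 3>.  */
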
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}

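/* For example, `ch >= '0' && ch <= '9'' yields the ranges + ['0', -] and
   + [-, '9'], which merge_ranges combines into + ['0', '9'];
   build_range_check then emits (unsigned) (ch - 48) <= 9 (assuming
   ASCII character values).  */
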
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}

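/* A worked example, assuming a 32-bit mode and a null MASK:
   unextend (0xFF, 8, 0, 0) takes the sign bit (0xFF >> 7) & 1 == 1,
   moves it to bit 31, and the arithmetic shift right by
   32 - 8 - 1 == 23 smears it into bits 31..8, giving 0xFFFFFF00;
   XORed with C this yields 0xFFFFFFFF, the sign-extension of the
   8-bit value 0xFF.  */
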
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}

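/* Illustration: in `(a > 0 && b == 3) || b != 3', the inverted comparison
   of CMPOP (b != 3) is b == 3, which matches the right-hand operand of
   the inner TRUTH_AND_EXPR, so the whole expression simplifies to
   `a > 0 || b != 3'.  The RHS_ONLY guard keeps forms like
   (A != NULL && A->f) || A == NULL intact, since there the left-hand
   operand guards the right.  */
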
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
5201 static tree
5202 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5203 tree lhs, tree rhs)
5205 /* If this is the "or" of two comparisons, we can do something if
5206 the comparisons are NE_EXPR. If this is the "and", we can do something
5207 if the comparisons are EQ_EXPR. I.e.,
5208 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5210 WANTED_CODE is this operation code. For single bit fields, we can
5211 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5212 comparison for one-bit fields. */
5214 enum tree_code wanted_code;
5215 enum tree_code lcode, rcode;
5216 tree ll_arg, lr_arg, rl_arg, rr_arg;
5217 tree ll_inner, lr_inner, rl_inner, rr_inner;
5218 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5219 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5220 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5221 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5222 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5223 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5224 enum machine_mode lnmode, rnmode;
5225 tree ll_mask, lr_mask, rl_mask, rr_mask;
5226 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5227 tree l_const, r_const;
5228 tree lntype, rntype, result;
5229 HOST_WIDE_INT first_bit, end_bit;
5230 int volatilep;
5232 /* Start by getting the comparison codes. Fail if anything is volatile.
5233 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5234 it were surrounded with a NE_EXPR. */
5236 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5237 return 0;
5239 lcode = TREE_CODE (lhs);
5240 rcode = TREE_CODE (rhs);
5242 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5244 lhs = build2 (NE_EXPR, truth_type, lhs,
5245 build_int_cst (TREE_TYPE (lhs), 0));
5246 lcode = NE_EXPR;
5249 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5251 rhs = build2 (NE_EXPR, truth_type, rhs,
5252 build_int_cst (TREE_TYPE (rhs), 0));
5253 rcode = NE_EXPR;
5256 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5257 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5258 return 0;
5260 ll_arg = TREE_OPERAND (lhs, 0);
5261 lr_arg = TREE_OPERAND (lhs, 1);
5262 rl_arg = TREE_OPERAND (rhs, 0);
5263 rr_arg = TREE_OPERAND (rhs, 1);
5265 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5266 if (simple_operand_p (ll_arg)
5267 && simple_operand_p (lr_arg))
5269 if (operand_equal_p (ll_arg, rl_arg, 0)
5270 && operand_equal_p (lr_arg, rr_arg, 0))
5272 result = combine_comparisons (loc, code, lcode, rcode,
5273 truth_type, ll_arg, lr_arg);
5274 if (result)
5275 return result;
5277 else if (operand_equal_p (ll_arg, rr_arg, 0)
5278 && operand_equal_p (lr_arg, rl_arg, 0))
5280 result = combine_comparisons (loc, code, lcode,
5281 swap_tree_comparison (rcode),
5282 truth_type, ll_arg, lr_arg);
5283 if (result)
5284 return result;
5288 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5289 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5291 /* If the RHS can be evaluated unconditionally and its operands are
5292 simple, it wins to evaluate the RHS unconditionally on machines
5293 with expensive branches. In this case, this isn't a comparison
5294 that can be merged. */
5296 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5297 false) >= 2
5298 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5299 && simple_operand_p (rl_arg)
5300 && simple_operand_p (rr_arg))
5302 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5303 if (code == TRUTH_OR_EXPR
5304 && lcode == NE_EXPR && integer_zerop (lr_arg)
5305 && rcode == NE_EXPR && integer_zerop (rr_arg)
5306 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5307 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5308 return build2_loc (loc, NE_EXPR, truth_type,
5309 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5310 ll_arg, rl_arg),
5311 build_int_cst (TREE_TYPE (ll_arg), 0));
5313 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5314 if (code == TRUTH_AND_EXPR
5315 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5316 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5317 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5318 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5319 return build2_loc (loc, EQ_EXPR, truth_type,
5320 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5321 ll_arg, rl_arg),
5322 build_int_cst (TREE_TYPE (ll_arg), 0));
5325 /* See if the comparisons can be merged. Then get all the parameters for
5326 each side. */
5328 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5329 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5330 return 0;
5332 volatilep = 0;
5333 ll_inner = decode_field_reference (loc, ll_arg,
5334 &ll_bitsize, &ll_bitpos, &ll_mode,
5335 &ll_unsignedp, &volatilep, &ll_mask,
5336 &ll_and_mask);
5337 lr_inner = decode_field_reference (loc, lr_arg,
5338 &lr_bitsize, &lr_bitpos, &lr_mode,
5339 &lr_unsignedp, &volatilep, &lr_mask,
5340 &lr_and_mask);
5341 rl_inner = decode_field_reference (loc, rl_arg,
5342 &rl_bitsize, &rl_bitpos, &rl_mode,
5343 &rl_unsignedp, &volatilep, &rl_mask,
5344 &rl_and_mask);
5345 rr_inner = decode_field_reference (loc, rr_arg,
5346 &rr_bitsize, &rr_bitpos, &rr_mode,
5347 &rr_unsignedp, &volatilep, &rr_mask,
5348 &rr_and_mask);
5350 /* It must be true that the inner operation on the lhs of each
5351 comparison must be the same if we are to be able to do anything.
5352 Then see if we have constants. If not, the same must be true for
5353 the rhs's. */
5354 if (volatilep || ll_inner == 0 || rl_inner == 0
5355 || ! operand_equal_p (ll_inner, rl_inner, 0))
5356 return 0;
5358 if (TREE_CODE (lr_arg) == INTEGER_CST
5359 && TREE_CODE (rr_arg) == INTEGER_CST)
5360 l_const = lr_arg, r_const = rr_arg;
5361 else if (lr_inner == 0 || rr_inner == 0
5362 || ! operand_equal_p (lr_inner, rr_inner, 0))
5363 return 0;
5364 else
5365 l_const = r_const = 0;
5367 /* If either comparison code is not correct for our logical operation,
5368 fail. However, we can convert a one-bit comparison against zero into
5369 the opposite comparison against that bit being set in the field. */
5371 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5372 if (lcode != wanted_code)
5374 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5376 /* Make the left operand unsigned, since we are only interested
5377 in the value of one bit. Otherwise we are doing the wrong
5378 thing below. */
5379 ll_unsignedp = 1;
5380 l_const = ll_mask;
5382 else
5383 return 0;
5386 /* This is analogous to the code for l_const above. */
5387 if (rcode != wanted_code)
5389 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5391 rl_unsignedp = 1;
5392 r_const = rl_mask;
5394 else
5395 return 0;
5398 /* See if we can find a mode that contains both fields being compared on
5399 the left. If we can't, fail. Otherwise, update all constants and masks
5400 to be relative to a field of that size. */
5401 first_bit = MIN (ll_bitpos, rl_bitpos);
5402 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5403 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5405 volatilep);
5406 if (lnmode == VOIDmode)
5407 return 0;
5409 lnbitsize = GET_MODE_BITSIZE (lnmode);
5410 lnbitpos = first_bit & ~ (lnbitsize - 1);
5411 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5412 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5414 if (BYTES_BIG_ENDIAN)
5416 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5417 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5420 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5421 size_int (xll_bitpos));
5422 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5423 size_int (xrl_bitpos));
5425 if (l_const)
5427 l_const = fold_convert_loc (loc, lntype, l_const);
5428 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5429 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5430 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5431 fold_build1_loc (loc, BIT_NOT_EXPR,
5432 lntype, ll_mask))))
5434 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5436 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5439 if (r_const)
5441 r_const = fold_convert_loc (loc, lntype, r_const);
5442 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5443 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5444 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5445 fold_build1_loc (loc, BIT_NOT_EXPR,
5446 lntype, rl_mask))))
5448 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5450 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5454 /* If the right sides are not constant, do the same for it. Also,
5455 disallow this optimization if a size or signedness mismatch occurs
5456 between the left and right sides. */
5457 if (l_const == 0)
5459 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5460 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5461 /* Make sure the two fields on the right
5462 correspond to the left without being swapped. */
5463 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5464 return 0;
5466 first_bit = MIN (lr_bitpos, rr_bitpos);
5467 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5468 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5469 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5470 volatilep);
5471 if (rnmode == VOIDmode)
5472 return 0;
5474 rnbitsize = GET_MODE_BITSIZE (rnmode);
5475 rnbitpos = first_bit & ~ (rnbitsize - 1);
5476 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5477 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5479 if (BYTES_BIG_ENDIAN)
5481 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5482 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5485 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5486 rntype, lr_mask),
5487 size_int (xlr_bitpos));
5488 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5489 rntype, rr_mask),
5490 size_int (xrr_bitpos));
5492 /* Make a mask that corresponds to both fields being compared.
5493 Do this for both items being compared. If the operands are the
5494 same size and the bits being compared are in the same position
5495 then we can do this by masking both and comparing the masked
5496 results. */
5497 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5498 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5499 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5501 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5502 ll_unsignedp || rl_unsignedp);
5503 if (! all_ones_mask_p (ll_mask, lnbitsize))
5504 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5506 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5507 lr_unsignedp || rr_unsignedp);
5508 if (! all_ones_mask_p (lr_mask, rnbitsize))
5509 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5511 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5514 /* There is still another way we can do something: If both pairs of
5515 fields being compared are adjacent, we may be able to make a wider
5516 field containing them both.
5518 Note that we still must mask the lhs/rhs expressions. Furthermore,
5519 the mask must be shifted to account for the shift done by
5520 make_bit_field_ref. */
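/* Sketch of the idea: for x.a == y.a && x.b == y.b, where a and b are
   adjacent bit-fields, both sides can be fetched as one field of
   ll_bitsize + rl_bitsize bits, turning two comparisons into one.  */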
5521 if ((ll_bitsize + ll_bitpos == rl_bitpos
5522 && lr_bitsize + lr_bitpos == rr_bitpos)
5523 || (ll_bitpos == rl_bitpos + rl_bitsize
5524 && lr_bitpos == rr_bitpos + rr_bitsize))
5526 tree type;
5528 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5529 ll_bitsize + rl_bitsize,
5530 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5531 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5532 lr_bitsize + rr_bitsize,
5533 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5535 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5536 size_int (MIN (xll_bitpos, xrl_bitpos)));
5537 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5538 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5540 /* Convert to the smaller type before masking out unwanted bits. */
5541 type = lntype;
5542 if (lntype != rntype)
5544 if (lnbitsize > rnbitsize)
5546 lhs = fold_convert_loc (loc, rntype, lhs);
5547 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5548 type = rntype;
5550 else if (lnbitsize < rnbitsize)
5552 rhs = fold_convert_loc (loc, lntype, rhs);
5553 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5554 type = lntype;
5558 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5559 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5561 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5562 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5564 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5567 return 0;
5570 /* Handle the case of comparisons with constants. If there is something in
5571 common between the masks, those bits of the constants must be the same.
5572 If not, the condition is always false. Test for this to avoid generating
5573 incorrect code below. */
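/* E.g. (illustration): (x & 3) == 1 && (x & 5) == 4 share bit 0, on
   which the two constants disagree, so the conjunction is known to be
   false without inspecting x.  */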
5574 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5575 if (! integer_zerop (result)
5576 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5577 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5579 if (wanted_code == NE_EXPR)
5581 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5582 return constant_boolean_node (true, truth_type);
5584 else
5586 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5587 return constant_boolean_node (false, truth_type);
5591 /* Construct the expression we will return. First get the component
5592 reference we will make. Unless the mask is all ones the width of
5593 that field, perform the mask operation. Then compare with the
5594 merged constant. */
5595 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5596 ll_unsignedp || rl_unsignedp);
5598 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5599 if (! all_ones_mask_p (ll_mask, lnbitsize))
5600 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5602 return build2_loc (loc, wanted_code, truth_type, result,
5603 const_binop (BIT_IOR_EXPR, l_const, r_const));
5606 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5607 constant. */
5609 static tree
5610 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5611 tree op0, tree op1)
5613 tree arg0 = op0;
5614 enum tree_code op_code;
5615 tree comp_const;
5616 tree minmax_const;
5617 int consts_equal, consts_lt;
5618 tree inner;
5620 STRIP_SIGN_NOPS (arg0);
5622 op_code = TREE_CODE (arg0);
5623 minmax_const = TREE_OPERAND (arg0, 1);
5624 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5625 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5626 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5627 inner = TREE_OPERAND (arg0, 0);
5629 /* If something does not permit us to optimize, return NULL_TREE. */
5630 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5631 || TREE_CODE (comp_const) != INTEGER_CST
5632 || TREE_OVERFLOW (comp_const)
5633 || TREE_CODE (minmax_const) != INTEGER_CST
5634 || TREE_OVERFLOW (minmax_const))
5635 return NULL_TREE;
5637 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5638 and GT_EXPR, doing the rest with recursive calls using logical
5639 simplifications. */
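/* E.g. (illustration): MIN (X, 4) < 4 is handled by recursing on the
   inverted comparison MIN (X, 4) >= 4 and inverting the folded
   result.  */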
5640 switch (code)
5642 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5644 tree tem
5645 = optimize_minmax_comparison (loc,
5646 invert_tree_comparison (code, false),
5647 type, op0, op1);
5648 if (tem)
5649 return invert_truthvalue_loc (loc, tem);
5650 return NULL_TREE;
5653 case GE_EXPR:
5654 return
5655 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5656 optimize_minmax_comparison
5657 (loc, EQ_EXPR, type, arg0, comp_const),
5658 optimize_minmax_comparison
5659 (loc, GT_EXPR, type, arg0, comp_const));
5661 case EQ_EXPR:
5662 if (op_code == MAX_EXPR && consts_equal)
5663 /* MAX (X, 0) == 0 -> X <= 0 */
5664 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5666 else if (op_code == MAX_EXPR && consts_lt)
5667 /* MAX (X, 0) == 5 -> X == 5 */
5668 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5670 else if (op_code == MAX_EXPR)
5671 /* MAX (X, 0) == -1 -> false */
5672 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5674 else if (consts_equal)
5675 /* MIN (X, 0) == 0 -> X >= 0 */
5676 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5678 else if (consts_lt)
5679 /* MIN (X, 0) == 5 -> false */
5680 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5682 else
5683 /* MIN (X, 0) == -1 -> X == -1 */
5684 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5686 case GT_EXPR:
5687 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5688 /* MAX (X, 0) > 0 -> X > 0
5689 MAX (X, 0) > 5 -> X > 5 */
5690 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5692 else if (op_code == MAX_EXPR)
5693 /* MAX (X, 0) > -1 -> true */
5694 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5696 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5697 /* MIN (X, 0) > 0 -> false
5698 MIN (X, 0) > 5 -> false */
5699 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5701 else
5702 /* MIN (X, 0) > -1 -> X > -1 */
5703 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5705 default:
5706 return NULL_TREE;
5710 /* T is an integer expression that is being multiplied or divided by, or
5711 reduced modulo, a constant C (CODE says which operation and what kind of
5712 divide or modulus). See if we can eliminate that operation by folding it with
5713 other operations already in T. WIDE_TYPE, if non-null, is a type that
5714 should be used for the computation if wider than our type.
5716 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5717 (X * 2) + (Y * 4). We must, however, be assured that either the original
5718 expression would not overflow or that overflow is undefined for the type
5719 in the language in question.
5721 If we return a non-null expression, it is an equivalent form of the
5722 original computation, but need not be in the original type.
5724 We set *STRICT_OVERFLOW_P to true if the return value depends on
5725 signed overflow being undefined. Otherwise we do not change
5726 *STRICT_OVERFLOW_P. */
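/* A further illustration (a sketch, not from the original comment):
   with CODE == TRUNC_MOD_EXPR and C == 4, the expression X * 8 folds
   to 0, and *STRICT_OVERFLOW_P is set because the fold relies on the
   multiplication not wrapping.  */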
5728 static tree
5729 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5730 bool *strict_overflow_p)
5732 /* To avoid exponential search depth, refuse to allow recursion past
5733 three levels. Beyond that (1) it's highly unlikely that we'll find
5734 something interesting and (2) we've probably processed it before
5735 when we built the inner expression. */
5737 static int depth;
5738 tree ret;
5740 if (depth > 3)
5741 return NULL;
5743 depth++;
5744 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5745 depth--;
5747 return ret;
5750 static tree
5751 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5752 bool *strict_overflow_p)
5754 tree type = TREE_TYPE (t);
5755 enum tree_code tcode = TREE_CODE (t);
5756 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5757 > GET_MODE_SIZE (TYPE_MODE (type)))
5758 ? wide_type : type);
5759 tree t1, t2;
5760 int same_p = tcode == code;
5761 tree op0 = NULL_TREE, op1 = NULL_TREE;
5762 bool sub_strict_overflow_p;
5764 /* Don't deal with constants of zero here; they confuse the code below. */
5765 if (integer_zerop (c))
5766 return NULL_TREE;
5768 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5769 op0 = TREE_OPERAND (t, 0);
5771 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5772 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5774 /* Note that we need not handle conditional operations here since fold
5775 already handles those cases. So just do arithmetic here. */
5776 switch (tcode)
5778 case INTEGER_CST:
5779 /* For a constant, we can always simplify if we are a multiply
5780 or (for divide and modulus) if it is a multiple of our constant. */
5781 if (code == MULT_EXPR
5782 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5783 return const_binop (code, fold_convert (ctype, t),
5784 fold_convert (ctype, c));
5785 break;
5787 CASE_CONVERT: case NON_LVALUE_EXPR:
5788 /* If op0 is an expression ... */
5789 if ((COMPARISON_CLASS_P (op0)
5790 || UNARY_CLASS_P (op0)
5791 || BINARY_CLASS_P (op0)
5792 || VL_EXP_CLASS_P (op0)
5793 || EXPRESSION_CLASS_P (op0))
5794 /* ... and has wrapping overflow, and its type is smaller
5795 than ctype, then we cannot pass through as widening. */
5796 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5797 && (TYPE_PRECISION (ctype)
5798 > TYPE_PRECISION (TREE_TYPE (op0))))
5799 /* ... or this is a truncation (t is narrower than op0),
5800 then we cannot pass through this narrowing. */
5801 || (TYPE_PRECISION (type)
5802 < TYPE_PRECISION (TREE_TYPE (op0)))
5803 /* ... or signedness changes for division or modulus,
5804 then we cannot pass through this conversion. */
5805 || (code != MULT_EXPR
5806 && (TYPE_UNSIGNED (ctype)
5807 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5808 /* ... or has undefined overflow while the converted to
5809 type has not, we cannot do the operation in the inner type
5810 as that would introduce undefined overflow. */
5811 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5812 && !TYPE_OVERFLOW_UNDEFINED (type))))
5813 break;
5815 /* Pass the constant down and see if we can make a simplification. If
5816 we can, replace this expression with the inner simplification for
5817 possible later conversion to our or some other type. */
5818 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5819 && TREE_CODE (t2) == INTEGER_CST
5820 && !TREE_OVERFLOW (t2)
5821 && (0 != (t1 = extract_muldiv (op0, t2, code,
5822 code == MULT_EXPR
5823 ? ctype : NULL_TREE,
5824 strict_overflow_p))))
5825 return t1;
5826 break;
5828 case ABS_EXPR:
5829 /* If widening the type changes it from signed to unsigned, then we
5830 must avoid building ABS_EXPR itself as unsigned. */
5831 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5833 tree cstype = (*signed_type_for) (ctype);
5834 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5835 != 0)
5837 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5838 return fold_convert (ctype, t1);
5840 break;
5842 /* If the constant is negative, we cannot simplify this. */
5843 if (tree_int_cst_sgn (c) == -1)
5844 break;
5845 /* FALLTHROUGH */
5846 case NEGATE_EXPR:
5847 /* For division and modulus, type can't be unsigned, as e.g.
5848 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5849 For signed types, even with wrapping overflow, this is fine. */
5850 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5851 break;
5852 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5853 != 0)
5854 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5855 break;
5857 case MIN_EXPR: case MAX_EXPR:
5858 /* If widening the type changes the signedness, then we can't perform
5859 this optimization as that changes the result. */
5860 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5861 break;
5863 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5864 sub_strict_overflow_p = false;
5865 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5866 &sub_strict_overflow_p)) != 0
5867 && (t2 = extract_muldiv (op1, c, code, wide_type,
5868 &sub_strict_overflow_p)) != 0)
5870 if (tree_int_cst_sgn (c) < 0)
5871 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5872 if (sub_strict_overflow_p)
5873 *strict_overflow_p = true;
5874 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5875 fold_convert (ctype, t2));
5877 break;
5879 case LSHIFT_EXPR: case RSHIFT_EXPR:
5880 /* If the second operand is constant, this is a multiplication
5881 or floor division by a power of two, so we can treat it that
5882 way unless the multiplier or divisor overflows. Signed
5883 left-shift overflow is implementation-defined rather than
5884 undefined in C90, so do not convert signed left shift into
5885 multiplication. */
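/* E.g. (illustration): for unsigned x, x << 3 is rebuilt below as
   x * 8 and the recursion continues on the multiply.  */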
5886 if (TREE_CODE (op1) == INTEGER_CST
5887 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5888 /* const_binop may not detect overflow correctly,
5889 so check for it explicitly here. */
5890 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5891 && TREE_INT_CST_HIGH (op1) == 0
5892 && 0 != (t1 = fold_convert (ctype,
5893 const_binop (LSHIFT_EXPR,
5894 size_one_node,
5895 op1)))
5896 && !TREE_OVERFLOW (t1))
5897 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5898 ? MULT_EXPR : FLOOR_DIV_EXPR,
5899 ctype,
5900 fold_convert (ctype, op0),
5901 t1),
5902 c, code, wide_type, strict_overflow_p);
5903 break;
5905 case PLUS_EXPR: case MINUS_EXPR:
5906 /* See if we can eliminate the operation on both sides. If we can, we
5907 can return a new PLUS or MINUS. If we can't, the only remaining
5908 cases where we can do anything are if the second operand is a
5909 constant. */
5910 sub_strict_overflow_p = false;
5911 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5912 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5913 if (t1 != 0 && t2 != 0
5914 && (code == MULT_EXPR
5915 /* If not multiplication, we can only do this if both operands
5916 are divisible by c. */
5917 || (multiple_of_p (ctype, op0, c)
5918 && multiple_of_p (ctype, op1, c))))
5920 if (sub_strict_overflow_p)
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5923 fold_convert (ctype, t2));
5926 /* If this was a subtraction, negate OP1 and set it to be an addition.
5927 This simplifies the logic below. */
5928 if (tcode == MINUS_EXPR)
5930 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5931 /* If OP1 was not easily negatable, the constant may be OP0. */
5932 if (TREE_CODE (op0) == INTEGER_CST)
5934 tree tem = op0;
5935 op0 = op1;
5936 op1 = tem;
5937 tem = t1;
5938 t1 = t2;
5939 t2 = tem;
5943 if (TREE_CODE (op1) != INTEGER_CST)
5944 break;
5946 /* If either OP1 or C are negative, this optimization is not safe for
5947 some of the division and remainder types while for others we need
5948 to change the code. */
5949 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5951 if (code == CEIL_DIV_EXPR)
5952 code = FLOOR_DIV_EXPR;
5953 else if (code == FLOOR_DIV_EXPR)
5954 code = CEIL_DIV_EXPR;
5955 else if (code != MULT_EXPR
5956 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5957 break;
5960 /* If it's a multiply or a division/modulus operation of a multiple
5961 of our constant, do the operation and verify it doesn't overflow. */
5962 if (code == MULT_EXPR
5963 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5965 op1 = const_binop (code, fold_convert (ctype, op1),
5966 fold_convert (ctype, c));
5967 /* We allow the constant to overflow with wrapping semantics. */
5968 if (op1 == 0
5969 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5970 break;
5972 else
5973 break;
5975 /* If we have an unsigned type, we cannot widen the operation since it
5976 will change the result if the original computation overflowed. */
5977 if (TYPE_UNSIGNED (ctype) && ctype != type)
5978 break;
5980 /* If we were able to eliminate our operation from the first side,
5981 apply our operation to the second side and reform the PLUS. */
5982 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5983 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5985 /* The last case is if we are a multiply. In that case, we can
5986 apply the distributive law to commute the multiply and addition
5987 if the multiplication of the constants doesn't overflow
5988 and overflow is defined. With undefined overflow
5989 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5990 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5991 return fold_build2 (tcode, ctype,
5992 fold_build2 (code, ctype,
5993 fold_convert (ctype, op0),
5994 fold_convert (ctype, c)),
5995 op1);
5997 break;
5999 case MULT_EXPR:
6000 /* We have a special case here if we are doing something like
6001 (C * 8) % 4 since we know that's zero. */
6002 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6003 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6004 /* If the multiplication can overflow we cannot optimize this. */
6005 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6006 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6007 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6009 *strict_overflow_p = true;
6010 return omit_one_operand (type, integer_zero_node, op0);
6013 /* ... fall through ... */
6015 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6016 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6017 /* If we can extract our operation from the LHS, do so and return a
6018 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6019 do something only if the second operand is a constant. */
6020 if (same_p
6021 && (t1 = extract_muldiv (op0, c, code, wide_type,
6022 strict_overflow_p)) != 0)
6023 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6024 fold_convert (ctype, op1));
6025 else if (tcode == MULT_EXPR && code == MULT_EXPR
6026 && (t1 = extract_muldiv (op1, c, code, wide_type,
6027 strict_overflow_p)) != 0)
6028 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6029 fold_convert (ctype, t1));
6030 else if (TREE_CODE (op1) != INTEGER_CST)
6031 return 0;
6033 /* If these are the same operation types, we can associate them
6034 assuming no overflow. */
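/* E.g. (illustration): (X * 4) * 2 associates to X * 8, provided the
   product of the two constants fits in the type without overflow.  */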
6035 if (tcode == code)
6037 double_int mul;
6038 bool overflow_p;
6039 unsigned prec = TYPE_PRECISION (ctype);
6040 bool uns = TYPE_UNSIGNED (ctype);
6041 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6042 double_int dic = tree_to_double_int (c).ext (prec, uns);
6043 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6044 overflow_p = ((!uns && overflow_p)
6045 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6046 if (!double_int_fits_to_tree_p (ctype, mul)
6047 && ((uns && tcode != MULT_EXPR) || !uns))
6048 overflow_p = 1;
6049 if (!overflow_p)
6050 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6051 double_int_to_tree (ctype, mul));
6054 /* If these operations "cancel" each other, we have the main
6055 optimizations of this pass, which occur when either constant is a
6056 multiple of the other, in which case we replace this with an
6057 operation of either CODE or TCODE.
6059 If we have an unsigned type, we cannot do this since it will change
6060 the result if the original computation overflowed. */
6061 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6062 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6063 || (tcode == MULT_EXPR
6064 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6065 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6066 && code != MULT_EXPR)))
6068 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6070 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6071 *strict_overflow_p = true;
6072 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6073 fold_convert (ctype,
6074 const_binop (TRUNC_DIV_EXPR,
6075 op1, c)));
6077 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6079 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6080 *strict_overflow_p = true;
6081 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6082 fold_convert (ctype,
6083 const_binop (TRUNC_DIV_EXPR,
6084 c, op1)));
6087 break;
6089 default:
6090 break;
6093 return 0;
6096 /* Return a node which has the indicated constant VALUE (either 0 or
6097 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6098 and is of the indicated TYPE. */
6100 tree
6101 constant_boolean_node (bool value, tree type)
6103 if (type == integer_type_node)
6104 return value ? integer_one_node : integer_zero_node;
6105 else if (type == boolean_type_node)
6106 return value ? boolean_true_node : boolean_false_node;
6107 else if (TREE_CODE (type) == VECTOR_TYPE)
6108 return build_vector_from_val (type,
6109 build_int_cst (TREE_TYPE (type),
6110 value ? -1 : 0));
6111 else
6112 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6116 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6117 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6118 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6119 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6120 COND is the first argument to CODE; otherwise (as in the example
6121 given here), it is the second argument. TYPE is the type of the
6122 original expression. Return NULL_TREE if no simplification is
6123 possible. */
6125 static tree
6126 fold_binary_op_with_conditional_arg (location_t loc,
6127 enum tree_code code,
6128 tree type, tree op0, tree op1,
6129 tree cond, tree arg, int cond_first_p)
6131 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6132 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6133 tree test, true_value, false_value;
6134 tree lhs = NULL_TREE;
6135 tree rhs = NULL_TREE;
6136 enum tree_code cond_code = COND_EXPR;
6138 if (TREE_CODE (cond) == COND_EXPR
6139 || TREE_CODE (cond) == VEC_COND_EXPR)
6141 test = TREE_OPERAND (cond, 0);
6142 true_value = TREE_OPERAND (cond, 1);
6143 false_value = TREE_OPERAND (cond, 2);
6144 /* If this operand does not produce a value (e.g. it throws), then it
6145 does not make sense to try to perform a logical or arithmetic
6146 operation involving it. */
6147 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6148 lhs = true_value;
6149 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6150 rhs = false_value;
6152 else
6154 tree testtype = TREE_TYPE (cond);
6155 test = cond;
6156 true_value = constant_boolean_node (true, testtype);
6157 false_value = constant_boolean_node (false, testtype);
6160 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6161 cond_code = VEC_COND_EXPR;
6163 /* This transformation is only worthwhile if we don't have to wrap ARG
6164 in a SAVE_EXPR and the operation can be simplified without recursing
6165 on at least one of the branches once it's pushed inside the COND_EXPR. */
6166 if (!TREE_CONSTANT (arg)
6167 && (TREE_SIDE_EFFECTS (arg)
6168 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6169 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6170 return NULL_TREE;
6172 arg = fold_convert_loc (loc, arg_type, arg);
6173 if (lhs == 0)
6175 true_value = fold_convert_loc (loc, cond_type, true_value);
6176 if (cond_first_p)
6177 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6178 else
6179 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6181 if (rhs == 0)
6183 false_value = fold_convert_loc (loc, cond_type, false_value);
6184 if (cond_first_p)
6185 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6186 else
6187 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6190 /* Check that we have simplified at least one of the branches. */
6191 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6192 return NULL_TREE;
6194 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6198 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6200 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6201 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6202 ADDEND is the same as X.
6204 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6205 and finite. The problematic cases are when X is zero, and its mode
6206 has signed zeros. In the case of rounding towards -infinity,
6207 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6208 modes, X + 0 is not the same as X because -0 + 0 is 0. */
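/* E.g. (illustration): with -fno-signed-zeros both x + 0.0 and
   x - 0.0 fold to x; with signed zeros honored only x - 0.0 can fold,
   and only when sign-dependent rounding can be ignored.  */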
6210 bool
6211 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6213 if (!real_zerop (addend))
6214 return false;
6216 /* Don't allow the fold with -fsignaling-nans. */
6217 if (HONOR_SNANS (TYPE_MODE (type)))
6218 return false;
6220 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6221 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6222 return true;
6224 /* In a vector or complex, we would need to check the sign of all zeros. */
6225 if (TREE_CODE (addend) != REAL_CST)
6226 return false;
6228 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6229 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6230 negate = !negate;
6232 /* The mode has signed zeros, and we have to honor their sign.
6233 In this situation, there is only one case we can return true for.
6234 X - 0 is the same as X unless rounding towards -infinity is
6235 supported. */
6236 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6239 /* Subroutine of fold() that checks comparisons of built-in math
6240 functions against real constants.
6242 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6243 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6244 is the type of the result and ARG0 and ARG1 are the operands of the
6245 comparison. ARG1 must be a TREE_REAL_CST.
6247 The function returns the constant folded tree if a simplification
6248 can be made, and NULL_TREE otherwise. */
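/* E.g. (illustration): sqrt (x) > 2.0 folds to x > 4.0 below, and
   sqrt (x) > -1.0 folds to x >= 0.0 when NaNs are honored.  */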
6250 static tree
6251 fold_mathfn_compare (location_t loc,
6252 enum built_in_function fcode, enum tree_code code,
6253 tree type, tree arg0, tree arg1)
6255 REAL_VALUE_TYPE c;
6257 if (BUILTIN_SQRT_P (fcode))
6259 tree arg = CALL_EXPR_ARG (arg0, 0);
6260 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6262 c = TREE_REAL_CST (arg1);
6263 if (REAL_VALUE_NEGATIVE (c))
6265 /* sqrt(x) = y and sqrt(x) < y are always false, if y is negative. */
6266 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6267 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6269 /* sqrt(x) > y is always true, if y is negative and we
6270 don't care about NaNs, i.e. negative values of x. */
6271 if (code == NE_EXPR || !HONOR_NANS (mode))
6272 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6274 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6275 return fold_build2_loc (loc, GE_EXPR, type, arg,
6276 build_real (TREE_TYPE (arg), dconst0));
6278 else if (code == GT_EXPR || code == GE_EXPR)
6280 REAL_VALUE_TYPE c2;
6282 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6283 real_convert (&c2, mode, &c2);
6285 if (REAL_VALUE_ISINF (c2))
6287 /* sqrt(x) > y is x == +Inf, when y is very large. */
6288 if (HONOR_INFINITIES (mode))
6289 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6290 build_real (TREE_TYPE (arg), c2));
6292 /* sqrt(x) > y is always false, when y is very large
6293 and we don't care about infinities. */
6294 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6297 /* sqrt(x) > c is the same as x > c*c. */
6298 return fold_build2_loc (loc, code, type, arg,
6299 build_real (TREE_TYPE (arg), c2));
6301 else if (code == LT_EXPR || code == LE_EXPR)
6303 REAL_VALUE_TYPE c2;
6305 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6306 real_convert (&c2, mode, &c2);
6308 if (REAL_VALUE_ISINF (c2))
6310 /* sqrt(x) < y is always true, when y is a very large
6311 value and we don't care about NaNs or Infinities. */
6312 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6313 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6315 /* sqrt(x) < y is x != +Inf when y is very large and we
6316 don't care about NaNs. */
6317 if (! HONOR_NANS (mode))
6318 return fold_build2_loc (loc, NE_EXPR, type, arg,
6319 build_real (TREE_TYPE (arg), c2));
6321 /* sqrt(x) < y is x >= 0 when y is very large and we
6322 don't care about Infinities. */
6323 if (! HONOR_INFINITIES (mode))
6324 return fold_build2_loc (loc, GE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg), dconst0));
6327 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6328 arg = save_expr (arg);
6329 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6330 fold_build2_loc (loc, GE_EXPR, type, arg,
6331 build_real (TREE_TYPE (arg),
6332 dconst0)),
6333 fold_build2_loc (loc, NE_EXPR, type, arg,
6334 build_real (TREE_TYPE (arg),
6335 c2)));
6338 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6339 if (! HONOR_NANS (mode))
6340 return fold_build2_loc (loc, code, type, arg,
6341 build_real (TREE_TYPE (arg), c2));
6343 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6344 arg = save_expr (arg);
6345 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6346 fold_build2_loc (loc, GE_EXPR, type, arg,
6347 build_real (TREE_TYPE (arg),
6348 dconst0)),
6349 fold_build2_loc (loc, code, type, arg,
6350 build_real (TREE_TYPE (arg),
6351 c2)));
6355 return NULL_TREE;
6358 /* Subroutine of fold() that optimizes comparisons against Infinities,
6359 either +Inf or -Inf.
6361 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6362 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6363 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6365 The function returns the constant folded tree if a simplification
6366 can be made, and NULL_TREE otherwise. */
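/* E.g. (illustration): for double, x < +Inf becomes x <= DBL_MAX, and
   x != +Inf becomes !(x > DBL_MAX) when NaNs must be honored.  */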
6368 static tree
6369 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6370 tree arg0, tree arg1)
6372 enum machine_mode mode;
6373 REAL_VALUE_TYPE max;
6374 tree temp;
6375 bool neg;
6377 mode = TYPE_MODE (TREE_TYPE (arg0));
6379 /* For negative infinity swap the sense of the comparison. */
6380 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6381 if (neg)
6382 code = swap_tree_comparison (code);
6384 switch (code)
6386 case GT_EXPR:
6387 /* x > +Inf is always false, if we ignore sNaNs. */
6388 if (HONOR_SNANS (mode))
6389 return NULL_TREE;
6390 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6392 case LE_EXPR:
6393 /* x <= +Inf is always true, if we don't care about NaNs. */
6394 if (! HONOR_NANS (mode))
6395 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6397 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6398 arg0 = save_expr (arg0);
6399 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6401 case EQ_EXPR:
6402 case GE_EXPR:
6403 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6404 real_maxval (&max, neg, mode);
6405 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6406 arg0, build_real (TREE_TYPE (arg0), max));
6408 case LT_EXPR:
6409 /* x < +Inf is always equal to x <= DBL_MAX. */
6410 real_maxval (&max, neg, mode);
6411 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6414 case NE_EXPR:
6415 /* x != +Inf is always equal to !(x > DBL_MAX). */
6416 real_maxval (&max, neg, mode);
6417 if (! HONOR_NANS (mode))
6418 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6419 arg0, build_real (TREE_TYPE (arg0), max));
6421 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6422 arg0, build_real (TREE_TYPE (arg0), max));
6423 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6425 default:
6426 break;
6429 return NULL_TREE;
6432 /* Subroutine of fold() that optimizes comparisons of a division by
6433 a nonzero integer constant against an integer constant, i.e.
6434 X/C1 op C2.
6436 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6437 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6438 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6440 The function returns the constant folded tree if a simplification
6441 can be made, and NULL_TREE otherwise. */
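/* Worked example (a sketch): for unsigned X, X / 4 == 3 holds exactly
   when 12 <= X <= 15, so the comparison becomes a single range check
   on X below.  */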
6443 static tree
6444 fold_div_compare (location_t loc,
6445 enum tree_code code, tree type, tree arg0, tree arg1)
6447 tree prod, tmp, hi, lo;
6448 tree arg00 = TREE_OPERAND (arg0, 0);
6449 tree arg01 = TREE_OPERAND (arg0, 1);
6450 double_int val;
6451 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6452 bool neg_overflow;
6453 bool overflow;
6455 /* We have to do this the hard way to detect unsigned overflow.
6456 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6457 val = TREE_INT_CST (arg01)
6458 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6459 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6460 neg_overflow = false;
6462 if (unsigned_p)
6464 tmp = int_const_binop (MINUS_EXPR, arg01,
6465 build_int_cst (TREE_TYPE (arg01), 1));
6466 lo = prod;
6468 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6469 val = TREE_INT_CST (prod)
6470 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6471 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6472 -1, overflow | TREE_OVERFLOW (prod));
6474 else if (tree_int_cst_sgn (arg01) >= 0)
6476 tmp = int_const_binop (MINUS_EXPR, arg01,
6477 build_int_cst (TREE_TYPE (arg01), 1));
6478 switch (tree_int_cst_sgn (arg1))
6480 case -1:
6481 neg_overflow = true;
6482 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6483 hi = prod;
6484 break;
6486 case 0:
6487 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6488 hi = tmp;
6489 break;
6491 case 1:
6492 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6493 lo = prod;
6494 break;
6496 default:
6497 gcc_unreachable ();
6500 else
6502 /* A negative divisor reverses the relational operators. */
6503 code = swap_tree_comparison (code);
6505 tmp = int_const_binop (PLUS_EXPR, arg01,
6506 build_int_cst (TREE_TYPE (arg01), 1));
6507 switch (tree_int_cst_sgn (arg1))
6509 case -1:
6510 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6511 lo = prod;
6512 break;
6514 case 0:
6515 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6516 lo = tmp;
6517 break;
6519 case 1:
6520 neg_overflow = true;
6521 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6522 hi = prod;
6523 break;
6525 default:
6526 gcc_unreachable ();
6530 switch (code)
6532 case EQ_EXPR:
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6539 return build_range_check (loc, type, arg00, 1, lo, hi);
6541 case NE_EXPR:
6542 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6543 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6544 if (TREE_OVERFLOW (hi))
6545 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6546 if (TREE_OVERFLOW (lo))
6547 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6548 return build_range_check (loc, type, arg00, 0, lo, hi);
6550 case LT_EXPR:
6551 if (TREE_OVERFLOW (lo))
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand_loc (loc, type, tmp, arg00);
6556 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6558 case LE_EXPR:
6559 if (TREE_OVERFLOW (hi))
6561 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6562 return omit_one_operand_loc (loc, type, tmp, arg00);
6564 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6566 case GT_EXPR:
6567 if (TREE_OVERFLOW (hi))
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand_loc (loc, type, tmp, arg00);
6572 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6574 case GE_EXPR:
6575 if (TREE_OVERFLOW (lo))
6577 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6578 return omit_one_operand_loc (loc, type, tmp, arg00);
6580 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6582 default:
6583 break;
6586 return NULL_TREE;
6590 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6591 equality/inequality test, then return a simplified form of the test
6592 using a sign test. Otherwise return NULL. TYPE is the desired
6593 result type. */
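/* E.g. (illustration, assuming a 32-bit int): (x & 0x80000000) != 0
   tests the sign bit of x, so it is rewritten below as x < 0 computed
   in the corresponding signed type.  */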
6595 static tree
6596 fold_single_bit_test_into_sign_test (location_t loc,
6597 enum tree_code code, tree arg0, tree arg1,
6598 tree result_type)
6600 /* If this is testing a single bit, we can optimize the test. */
6601 if ((code == NE_EXPR || code == EQ_EXPR)
6602 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6603 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6605 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6606 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6607 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6609 if (arg00 != NULL_TREE
6610 /* This is only a win if casting to a signed type is cheap,
6611 i.e. when arg00's type is not a partial mode. */
6612 && TYPE_PRECISION (TREE_TYPE (arg00))
6613 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6615 tree stype = signed_type_for (TREE_TYPE (arg00));
6616 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6617 result_type,
6618 fold_convert_loc (loc, stype, arg00),
6619 build_int_cst (stype, 0));
6623 return NULL_TREE;
6626 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6627 equality/inequality test, then return a simplified form of
6628 the test using shifts and logical operations. Otherwise return
6629 NULL. TYPE is the desired result type. */
6631 tree
6632 fold_single_bit_test (location_t loc, enum tree_code code,
6633 tree arg0, tree arg1, tree result_type)
6635 /* If this is testing a single bit, we can optimize the test. */
6636 if ((code == NE_EXPR || code == EQ_EXPR)
6637 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6638 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6640 tree inner = TREE_OPERAND (arg0, 0);
6641 tree type = TREE_TYPE (arg0);
6642 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6643 enum machine_mode operand_mode = TYPE_MODE (type);
6644 int ops_unsigned;
6645 tree signed_type, unsigned_type, intermediate_type;
6646 tree tem, one;
6648 /* First, see if we can fold the single bit test into a sign-bit
6649 test. */
6650 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6651 result_type);
6652 if (tem)
6653 return tem;
6655 /* Otherwise we have (A & C) != 0 where C is a single bit,
6656 convert that into ((A >> C2) & 1), where C2 = log2(C).
6657 Similarly for (A & C) == 0. */
6659 /* If INNER is a right shift of a constant and it plus BITNUM does
6660 not overflow, adjust BITNUM and INNER. */
6661 if (TREE_CODE (inner) == RSHIFT_EXPR
6662 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6663 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6664 && bitnum < TYPE_PRECISION (type)
6665 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6666 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6668 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6669 inner = TREE_OPERAND (inner, 0);
6672 /* If we are going to be able to omit the AND below, we must do our
6673 operations as unsigned. If we must use the AND, we have a choice.
6674 Normally unsigned is faster, but for some machines signed is. */
6675 #ifdef LOAD_EXTEND_OP
6676 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6677 && !flag_syntax_only) ? 0 : 1;
6678 #else
6679 ops_unsigned = 1;
6680 #endif
6682 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6683 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6684 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6685 inner = fold_convert_loc (loc, intermediate_type, inner);
6687 if (bitnum != 0)
6688 inner = build2 (RSHIFT_EXPR, intermediate_type,
6689 inner, size_int (bitnum));
6691 one = build_int_cst (intermediate_type, 1);
6693 if (code == EQ_EXPR)
6694 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6696 /* Put the AND last so it can combine with more things. */
6697 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6699 /* Make sure to return the proper type. */
6700 inner = fold_convert_loc (loc, result_type, inner);
6702 return inner;
6704 return NULL_TREE;
6707 /* Check whether we are allowed to reorder operands arg0 and arg1,
6708 such that the evaluation of arg1 occurs before arg0. */
6710 static bool
6711 reorder_operands_p (const_tree arg0, const_tree arg1)
6713 if (! flag_evaluation_order)
6714 return true;
6715 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6716 return true;
6717 return ! TREE_SIDE_EFFECTS (arg0)
6718 && ! TREE_SIDE_EFFECTS (arg1);
6721 /* Test whether it is preferable to swap two operands, ARG0 and
6722 ARG1, for example because ARG0 is an integer constant and ARG1
6723 isn't. If REORDER is true, only recommend swapping if we can
6724 evaluate the operands in reverse order. */
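/* E.g. (illustration): for 5 + x the INTEGER_CST first operand makes
   this return 1, so callers canonicalize the expression to x + 5.  */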
6726 bool
6727 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6729 STRIP_SIGN_NOPS (arg0);
6730 STRIP_SIGN_NOPS (arg1);
6732 if (TREE_CODE (arg1) == INTEGER_CST)
6733 return 0;
6734 if (TREE_CODE (arg0) == INTEGER_CST)
6735 return 1;
6737 if (TREE_CODE (arg1) == REAL_CST)
6738 return 0;
6739 if (TREE_CODE (arg0) == REAL_CST)
6740 return 1;
6742 if (TREE_CODE (arg1) == FIXED_CST)
6743 return 0;
6744 if (TREE_CODE (arg0) == FIXED_CST)
6745 return 1;
6747 if (TREE_CODE (arg1) == COMPLEX_CST)
6748 return 0;
6749 if (TREE_CODE (arg0) == COMPLEX_CST)
6750 return 1;
6752 if (TREE_CONSTANT (arg1))
6753 return 0;
6754 if (TREE_CONSTANT (arg0))
6755 return 1;
6757 if (optimize_function_for_size_p (cfun))
6758 return 0;
6760 if (reorder && flag_evaluation_order
6761 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6762 return 0;
6764 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6765 for commutative and comparison operators. Ensuring a canonical
6766 form allows the optimizers to find additional redundancies without
6767 having to explicitly check for both orderings. */
6768 if (TREE_CODE (arg0) == SSA_NAME
6769 && TREE_CODE (arg1) == SSA_NAME
6770 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6771 return 1;
6773 /* Put SSA_NAMEs last. */
6774 if (TREE_CODE (arg1) == SSA_NAME)
6775 return 0;
6776 if (TREE_CODE (arg0) == SSA_NAME)
6777 return 1;
6779 /* Put variables last. */
6780 if (DECL_P (arg1))
6781 return 0;
6782 if (DECL_P (arg0))
6783 return 1;
6785 return 0;
6788 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6789 ARG0 is extended to a wider type. */
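/* E.g. (illustration, assuming 16-bit short): (int) s == 100000 with s
   of type short is known false, since 100000 does not fit in short;
   constants that do fit are instead compared in the narrower type.  */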
6791 static tree
6792 fold_widened_comparison (location_t loc, enum tree_code code,
6793 tree type, tree arg0, tree arg1)
6795 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6796 tree arg1_unw;
6797 tree shorter_type, outer_type;
6798 tree min, max;
6799 bool above, below;
6801 if (arg0_unw == arg0)
6802 return NULL_TREE;
6803 shorter_type = TREE_TYPE (arg0_unw);
6805 #ifdef HAVE_canonicalize_funcptr_for_compare
6806 /* Disable this optimization if we're casting a function pointer
6807 type on targets that require function pointer canonicalization. */
6808 if (HAVE_canonicalize_funcptr_for_compare
6809 && TREE_CODE (shorter_type) == POINTER_TYPE
6810 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6811 return NULL_TREE;
6812 #endif
6814 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6815 return NULL_TREE;
6817 arg1_unw = get_unwidened (arg1, NULL_TREE);
6819 /* If possible, express the comparison in the shorter mode. */
6820 if ((code == EQ_EXPR || code == NE_EXPR
6821 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6822 && (TREE_TYPE (arg1_unw) == shorter_type
6823 || ((TYPE_PRECISION (shorter_type)
6824 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6825 && (TYPE_UNSIGNED (shorter_type)
6826 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6827 || (TREE_CODE (arg1_unw) == INTEGER_CST
6828 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6829 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6830 && int_fits_type_p (arg1_unw, shorter_type))))
6831 return fold_build2_loc (loc, code, type, arg0_unw,
6832 fold_convert_loc (loc, shorter_type, arg1_unw));
6834 if (TREE_CODE (arg1_unw) != INTEGER_CST
6835 || TREE_CODE (shorter_type) != INTEGER_TYPE
6836 || !int_fits_type_p (arg1_unw, shorter_type))
6837 return NULL_TREE;
6839 /* If we are comparing with an integer that does not fit into the range
6840 of the shorter type, the result is known. */
6841 outer_type = TREE_TYPE (arg1_unw);
6842 min = lower_bound_in_type (outer_type, shorter_type);
6843 max = upper_bound_in_type (outer_type, shorter_type);
6845 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6846 max, arg1_unw));
6847 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6848 arg1_unw, min));
6850 switch (code)
6852 case EQ_EXPR:
6853 if (above || below)
6854 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6855 break;
6857 case NE_EXPR:
6858 if (above || below)
6859 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6860 break;
6862 case LT_EXPR:
6863 case LE_EXPR:
6864 if (above)
6865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6866 else if (below)
6867 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6869 case GT_EXPR:
6870 case GE_EXPR:
6871 if (above)
6872 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6873 else if (below)
6874 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6876 default:
6877 break;
6880 return NULL_TREE;
6883 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6884 ARG0 just the signedness is changed. */
6886 static tree
6887 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6888 tree arg0, tree arg1)
6890 tree arg0_inner;
6891 tree inner_type, outer_type;
6893 if (!CONVERT_EXPR_P (arg0))
6894 return NULL_TREE;
6896 outer_type = TREE_TYPE (arg0);
6897 arg0_inner = TREE_OPERAND (arg0, 0);
6898 inner_type = TREE_TYPE (arg0_inner);
6900 #ifdef HAVE_canonicalize_funcptr_for_compare
6901 /* Disable this optimization if we're casting a function pointer
6902 type on targets that require function pointer canonicalization. */
6903 if (HAVE_canonicalize_funcptr_for_compare
6904 && TREE_CODE (inner_type) == POINTER_TYPE
6905 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6906 return NULL_TREE;
6907 #endif
6909 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6910 return NULL_TREE;
6912 if (TREE_CODE (arg1) != INTEGER_CST
6913 && !(CONVERT_EXPR_P (arg1)
6914 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6915 return NULL_TREE;
6917 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6918 && code != NE_EXPR
6919 && code != EQ_EXPR)
6920 return NULL_TREE;
6922 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6923 return NULL_TREE;
6925 if (TREE_CODE (arg1) == INTEGER_CST)
6926 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6927 0, TREE_OVERFLOW (arg1));
6928 else
6929 arg1 = fold_convert_loc (loc, inner_type, arg1);
6931 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6934 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6935 the step of the array. Reconstructs s and delta in the case of s *
6936 delta being an integer constant (and thus already folded). ADDR is
6937 the address. OP1 is the multiplicative expression. If the
6938 function succeeds, the new address expression is returned.
6939 Otherwise NULL_TREE is returned. LOC is the location of the
6940 resulting expression. */
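/* E.g. (illustration, hypothetical declarations): for int a[10][10],
   &a[1][2] p+ 40 * d, where 40 is the row size in bytes, can become
   &a[1 + d][2], provided 1 + d can be shown to stay in the domain of
   the outer dimension.  */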
6942 static tree
6943 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6945 tree s, delta, step;
6946 tree ref = TREE_OPERAND (addr, 0), pref;
6947 tree ret, pos;
6948 tree itype;
6949 bool mdim = false;
6951 /* Strip the nops that might be added when converting op1 to sizetype. */
6952 STRIP_NOPS (op1);
6954 /* Canonicalize op1 into a possibly non-constant delta
6955 and an INTEGER_CST s. */
6956 if (TREE_CODE (op1) == MULT_EXPR)
6958 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6960 STRIP_NOPS (arg0);
6961 STRIP_NOPS (arg1);
6963 if (TREE_CODE (arg0) == INTEGER_CST)
6965 s = arg0;
6966 delta = arg1;
6968 else if (TREE_CODE (arg1) == INTEGER_CST)
6970 s = arg1;
6971 delta = arg0;
6973 else
6974 return NULL_TREE;
6976 else if (TREE_CODE (op1) == INTEGER_CST)
6978 delta = op1;
6979 s = NULL_TREE;
6981 else
6983 /* Treat op1 as delta * 1. */
6984 delta = op1;
6985 s = integer_one_node;
6988 /* Handle &x.array the same as we would handle &x.array[0]. */
6989 if (TREE_CODE (ref) == COMPONENT_REF
6990 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6992 tree domain;
6994 /* Remember if this was a multi-dimensional array. */
6995 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6996 mdim = true;
6998 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6999 if (! domain)
7000 goto cont;
7001 itype = TREE_TYPE (domain);
7003 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7004 if (TREE_CODE (step) != INTEGER_CST)
7005 goto cont;
7007 if (s)
7009 if (! tree_int_cst_equal (step, s))
7010 goto cont;
7012 else
7014 /* Check whether delta is a multiple of step. */
7015 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7016 if (! tmp)
7017 goto cont;
7018 delta = tmp;
7021 /* Only fold here if we can verify we do not overflow one
7022 dimension of a multi-dimensional array. */
7023 if (mdim)
7025 tree tmp;
7027 if (!TYPE_MIN_VALUE (domain)
7028 || !TYPE_MAX_VALUE (domain)
7029 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7030 goto cont;
7032 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7033 fold_convert_loc (loc, itype,
7034 TYPE_MIN_VALUE (domain)),
7035 fold_convert_loc (loc, itype, delta));
7036 if (TREE_CODE (tmp) != INTEGER_CST
7037 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7038 goto cont;
7041 /* We found a suitable component reference. */
7043 pref = TREE_OPERAND (addr, 0);
7044 ret = copy_node (pref);
7045 SET_EXPR_LOCATION (ret, loc);
7047 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7048 fold_build2_loc
7049 (loc, PLUS_EXPR, itype,
7050 fold_convert_loc (loc, itype,
7051 TYPE_MIN_VALUE
7052 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7053 fold_convert_loc (loc, itype, delta)),
7054 NULL_TREE, NULL_TREE);
7055 return build_fold_addr_expr_loc (loc, ret);
7058 cont:
7060 for (;; ref = TREE_OPERAND (ref, 0))
7062 if (TREE_CODE (ref) == ARRAY_REF)
7064 tree domain;
7066 /* Remember if this was a multi-dimensional array. */
7067 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7068 mdim = true;
7070 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7071 if (! domain)
7072 continue;
7073 itype = TREE_TYPE (domain);
7075 step = array_ref_element_size (ref);
7076 if (TREE_CODE (step) != INTEGER_CST)
7077 continue;
7079 if (s)
7081 if (! tree_int_cst_equal (step, s))
7082 continue;
7084 else
7086 /* Check whether delta is a multiple of step. */
7087 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7088 if (! tmp)
7089 continue;
7090 delta = tmp;
7093 /* Only fold here if we can verify we do not overflow one
7094 dimension of a multi-dimensional array. */
7095 if (mdim)
7097 tree tmp;
7099 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7100 || !TYPE_MAX_VALUE (domain)
7101 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7102 continue;
7104 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7105 fold_convert_loc (loc, itype,
7106 TREE_OPERAND (ref, 1)),
7107 fold_convert_loc (loc, itype, delta));
7108 if (!tmp
7109 || TREE_CODE (tmp) != INTEGER_CST
7110 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7111 continue;
7114 break;
7116 else
7117 mdim = false;
7119 if (!handled_component_p (ref))
7120 return NULL_TREE;
7123 /* We found a suitable array reference. So copy everything up to it,
7124 and replace the index. */
7126 pref = TREE_OPERAND (addr, 0);
7127 ret = copy_node (pref);
7128 SET_EXPR_LOCATION (ret, loc);
7129 pos = ret;
7131 while (pref != ref)
7133 pref = TREE_OPERAND (pref, 0);
7134 TREE_OPERAND (pos, 0) = copy_node (pref);
7135 pos = TREE_OPERAND (pos, 0);
7138 TREE_OPERAND (pos, 1)
7139 = fold_build2_loc (loc, PLUS_EXPR, itype,
7140 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7141 fold_convert_loc (loc, itype, delta));
7142 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7146 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7147 means A >= Y && A != MAX, but in this case we know that
7148 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7150 static tree
7151 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7153 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7155 if (TREE_CODE (bound) == LT_EXPR)
7156 a = TREE_OPERAND (bound, 0);
7157 else if (TREE_CODE (bound) == GT_EXPR)
7158 a = TREE_OPERAND (bound, 1);
7159 else
7160 return NULL_TREE;
7162 typea = TREE_TYPE (a);
7163 if (!INTEGRAL_TYPE_P (typea)
7164 && !POINTER_TYPE_P (typea))
7165 return NULL_TREE;
7167 if (TREE_CODE (ineq) == LT_EXPR)
7169 a1 = TREE_OPERAND (ineq, 1);
7170 y = TREE_OPERAND (ineq, 0);
7172 else if (TREE_CODE (ineq) == GT_EXPR)
7174 a1 = TREE_OPERAND (ineq, 0);
7175 y = TREE_OPERAND (ineq, 1);
7177 else
7178 return NULL_TREE;
7180 if (TREE_TYPE (a1) != typea)
7181 return NULL_TREE;
7183 if (POINTER_TYPE_P (typea))
7185 /* Convert the pointer types to integers before taking the difference. */
7186 tree ta = fold_convert_loc (loc, ssizetype, a);
7187 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7188 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7190 else
7191 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7193 if (!diff || !integer_onep (diff))
7194 return NULL_TREE;
7196 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7199 /* Fold a sum or difference of at least one multiplication.
7200 Returns the folded tree or NULL if no simplification could be made. */
7202 static tree
7203 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7204 tree arg0, tree arg1)
7206 tree arg00, arg01, arg10, arg11;
7207 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7209 /* (A * C) +- (B * C) -> (A+-B) * C.
7210 (A * C) +- A -> A * (C+-1).
7211 We are most concerned about the case where C is a constant,
7212 but other combinations show up during loop reduction. Since
7213 it is not difficult, try all four possibilities. */
7215 if (TREE_CODE (arg0) == MULT_EXPR)
7217 arg00 = TREE_OPERAND (arg0, 0);
7218 arg01 = TREE_OPERAND (arg0, 1);
7220 else if (TREE_CODE (arg0) == INTEGER_CST)
7222 arg00 = build_one_cst (type);
7223 arg01 = arg0;
7225 else
7227 /* We cannot generate constant 1 for fract. */
7228 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7229 return NULL_TREE;
7230 arg00 = arg0;
7231 arg01 = build_one_cst (type);
7233 if (TREE_CODE (arg1) == MULT_EXPR)
7235 arg10 = TREE_OPERAND (arg1, 0);
7236 arg11 = TREE_OPERAND (arg1, 1);
7238 else if (TREE_CODE (arg1) == INTEGER_CST)
7240 arg10 = build_one_cst (type);
7241 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7242 the purpose of this canonicalization. */
7243 if (TREE_INT_CST_HIGH (arg1) == -1
7244 && negate_expr_p (arg1)
7245 && code == PLUS_EXPR)
7247 arg11 = negate_expr (arg1);
7248 code = MINUS_EXPR;
7250 else
7251 arg11 = arg1;
7253 else
7255 /* We cannot generate constant 1 for fract. */
7256 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7257 return NULL_TREE;
7258 arg10 = arg1;
7259 arg11 = build_one_cst (type);
7261 same = NULL_TREE;
7263 if (operand_equal_p (arg01, arg11, 0))
7264 same = arg01, alt0 = arg00, alt1 = arg10;
7265 else if (operand_equal_p (arg00, arg10, 0))
7266 same = arg00, alt0 = arg01, alt1 = arg11;
7267 else if (operand_equal_p (arg00, arg11, 0))
7268 same = arg00, alt0 = arg01, alt1 = arg10;
7269 else if (operand_equal_p (arg01, arg10, 0))
7270 same = arg01, alt0 = arg00, alt1 = arg11;
7272 /* No identical multiplicands; see if we can find a common
7273 power-of-two factor in non-power-of-two multiplies. This
7274 can help in multi-dimensional array access. */
7275 else if (tree_fits_shwi_p (arg01)
7276 && tree_fits_shwi_p (arg11))
7278 HOST_WIDE_INT int01, int11, tmp;
7279 bool swap = false;
7280 tree maybe_same;
7281 int01 = tree_to_shwi (arg01);
7282 int11 = tree_to_shwi (arg11);
7284 /* Move min of absolute values to int11. */
7285 if (absu_hwi (int01) < absu_hwi (int11))
7287 tmp = int01, int01 = int11, int11 = tmp;
7288 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7289 maybe_same = arg01;
7290 swap = true;
7292 else
7293 maybe_same = arg11;
7295 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7296 /* The remainder should not be a constant, otherwise we
7297 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7298 increase the number of multiplications needed. */
7299 && TREE_CODE (arg10) != INTEGER_CST)
7301 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7302 build_int_cst (TREE_TYPE (arg00),
7303 int01 / int11));
7304 alt1 = arg10;
7305 same = maybe_same;
7306 if (swap)
7307 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7311 if (same)
7312 return fold_build2_loc (loc, MULT_EXPR, type,
7313 fold_build2_loc (loc, code, type,
7314 fold_convert_loc (loc, type, alt0),
7315 fold_convert_loc (loc, type, alt1)),
7316 fold_convert_loc (loc, type, same));
7318 return NULL_TREE;
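/* Illustrative instances of the cases above: 3*x + 5*x folds to
   (3 + 5) * x, x*4 - x folds to (4 - 1) * x via the synthesized
   constant 1, and i*12 + j*4 folds to (i*3 + j) * 4 via the common
   power-of-two factor 4.  */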
7321 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7322 specified by EXPR into the buffer PTR of length LEN bytes.
7323 Return the number of bytes placed in the buffer, or zero
7324 upon failure. */
7326 static int
7327 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7329 tree type = TREE_TYPE (expr);
7330 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7331 int byte, offset, word, words;
7332 unsigned char value;
7334 if (total_bytes > len)
7335 return 0;
7336 words = total_bytes / UNITS_PER_WORD;
7338 for (byte = 0; byte < total_bytes; byte++)
7340 int bitpos = byte * BITS_PER_UNIT;
7341 if (bitpos < HOST_BITS_PER_WIDE_INT)
7342 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7343 else
7344 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7345 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7347 if (total_bytes > UNITS_PER_WORD)
7349 word = byte / UNITS_PER_WORD;
7350 if (WORDS_BIG_ENDIAN)
7351 word = (words - 1) - word;
7352 offset = word * UNITS_PER_WORD;
7353 if (BYTES_BIG_ENDIAN)
7354 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7355 else
7356 offset += byte % UNITS_PER_WORD;
7358 else
7359 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7360 ptr[offset] = value;
7362 return total_bytes;
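/* A sketch of the effect, under the assumption of a 32-bit int type
   (names and values here are purely illustrative):

#if 0
  unsigned char buf[4];
  native_encode_int (build_int_cst (integer_type_node, 0x11223344),
		     buf, sizeof buf);
  /+ Little-endian target: buf == { 0x44, 0x33, 0x22, 0x11 };
     big-endian target:    buf == { 0x11, 0x22, 0x33, 0x44 }.
     The buffer reflects the target's memory layout, not the host's. +/
#endif
*/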
7366 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7367 specified by EXPR into the buffer PTR of length LEN bytes.
7368 Return the number of bytes placed in the buffer, or zero
7369 upon failure. */
7371 static int
7372 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7374 tree type = TREE_TYPE (expr);
7375 enum machine_mode mode = TYPE_MODE (type);
7376 int total_bytes = GET_MODE_SIZE (mode);
7377 FIXED_VALUE_TYPE value;
7378 tree i_value, i_type;
7380 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7381 return 0;
7383 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7385 if (NULL_TREE == i_type
7386 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7387 return 0;
7389 value = TREE_FIXED_CST (expr);
7390 i_value = double_int_to_tree (i_type, value.data);
7392 return native_encode_int (i_value, ptr, len);
7396 /* Subroutine of native_encode_expr. Encode the REAL_CST
7397 specified by EXPR into the buffer PTR of length LEN bytes.
7398 Return the number of bytes placed in the buffer, or zero
7399 upon failure. */
7401 static int
7402 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7404 tree type = TREE_TYPE (expr);
7405 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7406 int byte, offset, word, words, bitpos;
7407 unsigned char value;
7409 /* There are always 32 bits in each long, no matter the size of
7410 the host's long. We handle floating-point representations with
7411 up to 192 bits. */
7412 long tmp[6];
7414 if (total_bytes > len)
7415 return 0;
7416 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7418 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7420 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7421 bitpos += BITS_PER_UNIT)
7423 byte = (bitpos / BITS_PER_UNIT) & 3;
7424 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7426 if (UNITS_PER_WORD < 4)
7428 word = byte / UNITS_PER_WORD;
7429 if (WORDS_BIG_ENDIAN)
7430 word = (words - 1) - word;
7431 offset = word * UNITS_PER_WORD;
7432 if (BYTES_BIG_ENDIAN)
7433 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7434 else
7435 offset += byte % UNITS_PER_WORD;
7437 else
7438 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7439 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7441 return total_bytes;
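/* E.g. an 8-byte IEEE double arrives from real_to_target as two
   32-bit chunks in tmp[0] and tmp[1]; the loop above then scatters
   each chunk's bytes into PTR in target byte order, four bytes at a
   time, independently of the width of the host's long.  */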
7444 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7445 specified by EXPR into the buffer PTR of length LEN bytes.
7446 Return the number of bytes placed in the buffer, or zero
7447 upon failure. */
7449 static int
7450 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7452 int rsize, isize;
7453 tree part;
7455 part = TREE_REALPART (expr);
7456 rsize = native_encode_expr (part, ptr, len);
7457 if (rsize == 0)
7458 return 0;
7459 part = TREE_IMAGPART (expr);
7460 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7461 if (isize != rsize)
7462 return 0;
7463 return rsize + isize;
7467 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7468 specified by EXPR into the buffer PTR of length LEN bytes.
7469 Return the number of bytes placed in the buffer, or zero
7470 upon failure. */
7472 static int
7473 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7475 unsigned i, count;
7476 int size, offset;
7477 tree itype, elem;
7479 offset = 0;
7480 count = VECTOR_CST_NELTS (expr);
7481 itype = TREE_TYPE (TREE_TYPE (expr));
7482 size = GET_MODE_SIZE (TYPE_MODE (itype));
7483 for (i = 0; i < count; i++)
7485 elem = VECTOR_CST_ELT (expr, i);
7486 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7487 return 0;
7488 offset += size;
7490 return offset;
7494 /* Subroutine of native_encode_expr. Encode the STRING_CST
7495 specified by EXPR into the buffer PTR of length LEN bytes.
7496 Return the number of bytes placed in the buffer, or zero
7497 upon failure. */
7499 static int
7500 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7502 tree type = TREE_TYPE (expr);
7503 HOST_WIDE_INT total_bytes;
7505 if (TREE_CODE (type) != ARRAY_TYPE
7506 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7507 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7508 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7509 return 0;
7510 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7511 if (total_bytes > len)
7512 return 0;
7513 if (TREE_STRING_LENGTH (expr) < total_bytes)
7515 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7516 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7517 total_bytes - TREE_STRING_LENGTH (expr));
7519 else
7520 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7521 return total_bytes;
7525 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7526 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7527 specified by EXPR into the buffer PTR of length LEN bytes.
7528 Return the number of bytes placed in the buffer, or zero upon failure. */
7530 int
7531 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7533 switch (TREE_CODE (expr))
7535 case INTEGER_CST:
7536 return native_encode_int (expr, ptr, len);
7538 case REAL_CST:
7539 return native_encode_real (expr, ptr, len);
7541 case FIXED_CST:
7542 return native_encode_fixed (expr, ptr, len);
7544 case COMPLEX_CST:
7545 return native_encode_complex (expr, ptr, len);
7547 case VECTOR_CST:
7548 return native_encode_vector (expr, ptr, len);
7550 case STRING_CST:
7551 return native_encode_string (expr, ptr, len);
7553 default:
7554 return 0;
7559 /* Subroutine of native_interpret_expr. Interpret the contents of
7560 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7561 If the buffer cannot be interpreted, return NULL_TREE. */
7563 static tree
7564 native_interpret_int (tree type, const unsigned char *ptr, int len)
7566 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7567 double_int result;
7569 if (total_bytes > len
7570 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7571 return NULL_TREE;
7573 result = double_int::from_buffer (ptr, total_bytes);
7575 return double_int_to_tree (type, result);
7579 /* Subroutine of native_interpret_expr. Interpret the contents of
7580 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7581 If the buffer cannot be interpreted, return NULL_TREE. */
7583 static tree
7584 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7586 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7587 double_int result;
7588 FIXED_VALUE_TYPE fixed_value;
7590 if (total_bytes > len
7591 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7592 return NULL_TREE;
7594 result = double_int::from_buffer (ptr, total_bytes);
7595 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7597 return build_fixed (type, fixed_value);
7601 /* Subroutine of native_interpret_expr. Interpret the contents of
7602 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7603 If the buffer cannot be interpreted, return NULL_TREE. */
7605 static tree
7606 native_interpret_real (tree type, const unsigned char *ptr, int len)
7608 enum machine_mode mode = TYPE_MODE (type);
7609 int total_bytes = GET_MODE_SIZE (mode);
7610 int byte, offset, word, words, bitpos;
7611 unsigned char value;
7612 /* There are always 32 bits in each long, no matter the size of
7613 the host's long. We handle floating-point representations with
7614 up to 192 bits. */
7615 REAL_VALUE_TYPE r;
7616 long tmp[6];
7618 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7619 if (total_bytes > len || total_bytes > 24)
7620 return NULL_TREE;
7621 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7623 memset (tmp, 0, sizeof (tmp));
7624 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7625 bitpos += BITS_PER_UNIT)
7627 byte = (bitpos / BITS_PER_UNIT) & 3;
7628 if (UNITS_PER_WORD < 4)
7630 word = byte / UNITS_PER_WORD;
7631 if (WORDS_BIG_ENDIAN)
7632 word = (words - 1) - word;
7633 offset = word * UNITS_PER_WORD;
7634 if (BYTES_BIG_ENDIAN)
7635 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7636 else
7637 offset += byte % UNITS_PER_WORD;
7639 else
7640 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7641 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7643 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7646 real_from_target (&r, tmp, mode);
7647 return build_real (type, r);
7651 /* Subroutine of native_interpret_expr. Interpret the contents of
7652 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7653 If the buffer cannot be interpreted, return NULL_TREE. */
7655 static tree
7656 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7658 tree etype, rpart, ipart;
7659 int size;
7661 etype = TREE_TYPE (type);
7662 size = GET_MODE_SIZE (TYPE_MODE (etype));
7663 if (size * 2 > len)
7664 return NULL_TREE;
7665 rpart = native_interpret_expr (etype, ptr, size);
7666 if (!rpart)
7667 return NULL_TREE;
7668 ipart = native_interpret_expr (etype, ptr+size, size);
7669 if (!ipart)
7670 return NULL_TREE;
7671 return build_complex (type, rpart, ipart);
7675 /* Subroutine of native_interpret_expr. Interpret the contents of
7676 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7677 If the buffer cannot be interpreted, return NULL_TREE. */
7679 static tree
7680 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7682 tree etype, elem;
7683 int i, size, count;
7684 tree *elements;
7686 etype = TREE_TYPE (type);
7687 size = GET_MODE_SIZE (TYPE_MODE (etype));
7688 count = TYPE_VECTOR_SUBPARTS (type);
7689 if (size * count > len)
7690 return NULL_TREE;
7692 elements = XALLOCAVEC (tree, count);
7693 for (i = count - 1; i >= 0; i--)
7695 elem = native_interpret_expr (etype, ptr+(i*size), size);
7696 if (!elem)
7697 return NULL_TREE;
7698 elements[i] = elem;
7700 return build_vector (type, elements);
7704 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7705 the buffer PTR of length LEN as a constant of type TYPE. For
7706 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7707 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7708 return NULL_TREE. */
7710 tree
7711 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7713 switch (TREE_CODE (type))
7715 case INTEGER_TYPE:
7716 case ENUMERAL_TYPE:
7717 case BOOLEAN_TYPE:
7718 case POINTER_TYPE:
7719 case REFERENCE_TYPE:
7720 return native_interpret_int (type, ptr, len);
7722 case REAL_TYPE:
7723 return native_interpret_real (type, ptr, len);
7725 case FIXED_POINT_TYPE:
7726 return native_interpret_fixed (type, ptr, len);
7728 case COMPLEX_TYPE:
7729 return native_interpret_complex (type, ptr, len);
7731 case VECTOR_TYPE:
7732 return native_interpret_vector (type, ptr, len);
7734 default:
7735 return NULL_TREE;
7739 /* Returns true if we can interpret the contents of a native encoding
7740 as TYPE. */
7742 static bool
7743 can_native_interpret_type_p (tree type)
7745 switch (TREE_CODE (type))
7747 case INTEGER_TYPE:
7748 case ENUMERAL_TYPE:
7749 case BOOLEAN_TYPE:
7750 case POINTER_TYPE:
7751 case REFERENCE_TYPE:
7752 case FIXED_POINT_TYPE:
7753 case REAL_TYPE:
7754 case COMPLEX_TYPE:
7755 case VECTOR_TYPE:
7756 return true;
7757 default:
7758 return false;
7762 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7763 TYPE at compile-time. If we're unable to perform the conversion
7764 return NULL_TREE. */
7766 static tree
7767 fold_view_convert_expr (tree type, tree expr)
7769 /* We support up to 512-bit values (for V8DFmode). */
7770 unsigned char buffer[64];
7771 int len;
7773 /* Check that the host and target are sane. */
7774 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7775 return NULL_TREE;
7777 len = native_encode_expr (expr, buffer, sizeof (buffer));
7778 if (len == 0)
7779 return NULL_TREE;
7781 return native_interpret_expr (type, buffer, len);
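/* A round-trip usage sketch, assuming an IEEE single-precision float
   mode on the target:

#if 0
  tree f = build_real (float_type_node, dconst1);        /+ 1.0f +/
  tree i = fold_view_convert_expr (integer_type_node, f);
  /+ i is the INTEGER_CST 1065353216, i.e. the bit pattern
     0x3f800000 of 1.0f re-read as a 32-bit integer. +/
#endif
*/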
7784 /* Build an expression for the address of T. Folds away INDIRECT_REF
7785 to avoid confusing the gimplify process. */
7787 tree
7788 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7790 /* The size of the object is not relevant when talking about its address. */
7791 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7792 t = TREE_OPERAND (t, 0);
7794 if (TREE_CODE (t) == INDIRECT_REF)
7796 t = TREE_OPERAND (t, 0);
7798 if (TREE_TYPE (t) != ptrtype)
7799 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7801 else if (TREE_CODE (t) == MEM_REF
7802 && integer_zerop (TREE_OPERAND (t, 1)))
7803 return TREE_OPERAND (t, 0);
7804 else if (TREE_CODE (t) == MEM_REF
7805 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7806 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7807 TREE_OPERAND (t, 0),
7808 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7809 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7811 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7813 if (TREE_TYPE (t) != ptrtype)
7814 t = fold_convert_loc (loc, ptrtype, t);
7816 else
7817 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7819 return t;
7822 /* Build an expression for the address of T. */
7824 tree
7825 build_fold_addr_expr_loc (location_t loc, tree t)
7827 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7829 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7832 static bool vec_cst_ctor_to_array (tree, tree *);
7834 /* Fold a unary expression of code CODE and type TYPE with operand
7835 OP0. Return the folded expression if folding is successful.
7836 Otherwise, return NULL_TREE. */
7838 tree
7839 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7841 tree tem;
7842 tree arg0;
7843 enum tree_code_class kind = TREE_CODE_CLASS (code);
7845 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7846 && TREE_CODE_LENGTH (code) == 1);
7848 arg0 = op0;
7849 if (arg0)
7851 if (CONVERT_EXPR_CODE_P (code)
7852 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7854 /* Don't use STRIP_NOPS, because signedness of argument type
7855 matters. */
7856 STRIP_SIGN_NOPS (arg0);
7858 else
7860 /* Strip any conversions that don't change the mode. This
7861 is safe for every expression, except for a comparison
7862 expression because its signedness is derived from its
7863 operands.
7865 Note that this is done as an internal manipulation within
7866 the constant folder, in order to find the simplest
7867 representation of the arguments so that their form can be
7868 studied. In any case, the appropriate type conversions
7869 should be put back in the tree that will get out of the
7870 constant folder. */
7871 STRIP_NOPS (arg0);
7875 if (TREE_CODE_CLASS (code) == tcc_unary)
7877 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7878 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7879 fold_build1_loc (loc, code, type,
7880 fold_convert_loc (loc, TREE_TYPE (op0),
7881 TREE_OPERAND (arg0, 1))));
7882 else if (TREE_CODE (arg0) == COND_EXPR)
7884 tree arg01 = TREE_OPERAND (arg0, 1);
7885 tree arg02 = TREE_OPERAND (arg0, 2);
7886 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7887 arg01 = fold_build1_loc (loc, code, type,
7888 fold_convert_loc (loc,
7889 TREE_TYPE (op0), arg01));
7890 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7891 arg02 = fold_build1_loc (loc, code, type,
7892 fold_convert_loc (loc,
7893 TREE_TYPE (op0), arg02));
7894 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7895 arg01, arg02);
7897 /* If this was a conversion, and all we did was move it
7898 inside the COND_EXPR, bring it back out. But leave it if
7899 it is a conversion from integer to integer and the
7900 result precision is no wider than a word since such a
7901 conversion is cheap and may be optimized away by combine,
7902 while it couldn't if it were outside the COND_EXPR. Then return
7903 so we don't get into an infinite recursion loop taking the
7904 conversion out and then back in. */
7906 if ((CONVERT_EXPR_CODE_P (code)
7907 || code == NON_LVALUE_EXPR)
7908 && TREE_CODE (tem) == COND_EXPR
7909 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7910 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7911 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7912 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7913 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7914 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7915 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7916 && (INTEGRAL_TYPE_P
7917 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7918 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7919 || flag_syntax_only))
7920 tem = build1_loc (loc, code, type,
7921 build3 (COND_EXPR,
7922 TREE_TYPE (TREE_OPERAND
7923 (TREE_OPERAND (tem, 1), 0)),
7924 TREE_OPERAND (tem, 0),
7925 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7926 TREE_OPERAND (TREE_OPERAND (tem, 2),
7927 0)));
7928 return tem;
7932 switch (code)
7934 case PAREN_EXPR:
7935 /* Re-association barriers around constants and other re-association
7936 barriers can be removed. */
7937 if (CONSTANT_CLASS_P (op0)
7938 || TREE_CODE (op0) == PAREN_EXPR)
7939 return fold_convert_loc (loc, type, op0);
7940 return NULL_TREE;
7942 case NON_LVALUE_EXPR:
7943 if (!maybe_lvalue_p (op0))
7944 return fold_convert_loc (loc, type, op0);
7945 return NULL_TREE;
7947 CASE_CONVERT:
7948 case FLOAT_EXPR:
7949 case FIX_TRUNC_EXPR:
7950 if (TREE_TYPE (op0) == type)
7951 return op0;
7953 if (COMPARISON_CLASS_P (op0))
7955 /* If we have (type) (a CMP b) and type is an integral type, return
7956 a new expression involving the new type. Canonicalize
7957 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7958 a non-integral type.
7959 Do not fold the result, as that would not simplify further;
7960 folding again would only lead to recursion. */
7961 if (TREE_CODE (type) == BOOLEAN_TYPE)
7962 return build2_loc (loc, TREE_CODE (op0), type,
7963 TREE_OPERAND (op0, 0),
7964 TREE_OPERAND (op0, 1));
7965 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7966 && TREE_CODE (type) != VECTOR_TYPE)
7967 return build3_loc (loc, COND_EXPR, type, op0,
7968 constant_boolean_node (true, type),
7969 constant_boolean_node (false, type));
7972 /* Handle cases of two conversions in a row. */
7973 if (CONVERT_EXPR_P (op0))
7975 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7976 tree inter_type = TREE_TYPE (op0);
7977 int inside_int = INTEGRAL_TYPE_P (inside_type);
7978 int inside_ptr = POINTER_TYPE_P (inside_type);
7979 int inside_float = FLOAT_TYPE_P (inside_type);
7980 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7981 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7982 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7983 int inter_int = INTEGRAL_TYPE_P (inter_type);
7984 int inter_ptr = POINTER_TYPE_P (inter_type);
7985 int inter_float = FLOAT_TYPE_P (inter_type);
7986 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7987 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7988 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7989 int final_int = INTEGRAL_TYPE_P (type);
7990 int final_ptr = POINTER_TYPE_P (type);
7991 int final_float = FLOAT_TYPE_P (type);
7992 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7993 unsigned int final_prec = TYPE_PRECISION (type);
7994 int final_unsignedp = TYPE_UNSIGNED (type);
7996 /* In addition to the cases of two conversions in a row
7997 handled below, if we are converting something to its own
7998 type via an object of identical or wider precision, neither
7999 conversion is needed. */
8000 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8001 && (((inter_int || inter_ptr) && final_int)
8002 || (inter_float && final_float))
8003 && inter_prec >= final_prec)
8004 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8006 /* Likewise, if the intermediate and initial types are either both
8007 float or both integer, we don't need the middle conversion if the
8008 former is wider than the latter and doesn't change the signedness
8009 (for integers). Avoid this if the final type is a pointer since
8010 then we sometimes need the middle conversion. Likewise if the
8011 final type has a precision not equal to the size of its mode. */
8012 if (((inter_int && inside_int) || (inter_float && inside_float))
8013 && (final_int || final_float)
8014 && inter_prec >= inside_prec
8015 && (inter_float || inter_unsignedp == inside_unsignedp)
8016 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8017 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8018 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8020 /* If we have a sign-extension of a zero-extended value, we can
8021 replace that by a single zero-extension. Likewise if the
8022 final conversion does not change precision we can drop the
8023 intermediate conversion. */
8024 if (inside_int && inter_int && final_int
8025 && ((inside_prec < inter_prec && inter_prec < final_prec
8026 && inside_unsignedp && !inter_unsignedp)
8027 || final_prec == inter_prec))
8028 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8030 /* Two conversions in a row are not needed unless:
8031 - some conversion is floating-point (overstrict for now), or
8032 - some conversion is a vector (overstrict for now), or
8033 - the intermediate type is narrower than both initial and
8034 final, or
8035 - the intermediate type and innermost type differ in signedness,
8036 and the outermost type is wider than the intermediate, or
8037 - the initial type is a pointer type and the precisions of the
8038 intermediate and final types differ, or
8039 - the final type is a pointer type and the precisions of the
8040 initial and intermediate types differ. */
8041 if (! inside_float && ! inter_float && ! final_float
8042 && ! inside_vec && ! inter_vec && ! final_vec
8043 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8044 && ! (inside_int && inter_int
8045 && inter_unsignedp != inside_unsignedp
8046 && inter_prec < final_prec)
8047 && ((inter_unsignedp && inter_prec > inside_prec)
8048 == (final_unsignedp && final_prec > inter_prec))
8049 && ! (inside_ptr && inter_prec != final_prec)
8050 && ! (final_ptr && inside_prec != inter_prec)
8051 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8052 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8053 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
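/* Worked instances of the rules above, assuming 16-bit short, 32-bit
   int and 64-bit long long: for unsigned short x, (long long)(int)x
   folds to (long long)x, the zero-extension alone giving the same
   value; for int y, (int)(long long)y folds to y, the widening being
   dropped; but (int)(short)z for int z keeps the truncation, as the
   intermediate type is narrower than both ends.  */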
8056 /* Handle (T *)&A.B.C for A being of type T and B and C
8057 living at offset zero. This occurs frequently in
8058 C++ upcasting and then accessing the base. */
8059 if (TREE_CODE (op0) == ADDR_EXPR
8060 && POINTER_TYPE_P (type)
8061 && handled_component_p (TREE_OPERAND (op0, 0)))
8063 HOST_WIDE_INT bitsize, bitpos;
8064 tree offset;
8065 enum machine_mode mode;
8066 int unsignedp, volatilep;
8067 tree base = TREE_OPERAND (op0, 0);
8068 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8069 &mode, &unsignedp, &volatilep, false);
8070 /* If the reference was to a (constant) zero offset, we can use
8071 the address of the base if it has the same base type
8072 as the result type and the pointer type is unqualified. */
8073 if (! offset && bitpos == 0
8074 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8075 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8076 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8077 return fold_convert_loc (loc, type,
8078 build_fold_addr_expr_loc (loc, base));
8081 if (TREE_CODE (op0) == MODIFY_EXPR
8082 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8083 /* Detect assigning a bitfield. */
8084 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8085 && DECL_BIT_FIELD
8086 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8088 /* Don't leave an assignment inside a conversion
8089 unless assigning a bitfield. */
8090 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8091 /* First do the assignment, then return converted constant. */
8092 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8093 TREE_NO_WARNING (tem) = 1;
8094 TREE_USED (tem) = 1;
8095 return tem;
8098 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8099 constant (if x has signed type, the sign bit cannot be set
8100 in c). This folds extension into the BIT_AND_EXPR.
8101 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8102 very likely don't have maximal range for their precision and this
8103 transformation effectively doesn't preserve non-maximal ranges. */
8104 if (TREE_CODE (type) == INTEGER_TYPE
8105 && TREE_CODE (op0) == BIT_AND_EXPR
8106 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8108 tree and_expr = op0;
8109 tree and0 = TREE_OPERAND (and_expr, 0);
8110 tree and1 = TREE_OPERAND (and_expr, 1);
8111 int change = 0;
8113 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8114 || (TYPE_PRECISION (type)
8115 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8116 change = 1;
8117 else if (TYPE_PRECISION (TREE_TYPE (and1))
8118 <= HOST_BITS_PER_WIDE_INT
8119 && tree_fits_uhwi_p (and1))
8121 unsigned HOST_WIDE_INT cst;
8123 cst = tree_to_uhwi (and1);
8124 cst &= HOST_WIDE_INT_M1U
8125 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8126 change = (cst == 0);
8127 #ifdef LOAD_EXTEND_OP
8128 if (change
8129 && !flag_syntax_only
8130 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8131 == ZERO_EXTEND))
8133 tree uns = unsigned_type_for (TREE_TYPE (and0));
8134 and0 = fold_convert_loc (loc, uns, and0);
8135 and1 = fold_convert_loc (loc, uns, and1);
8137 #endif
8139 if (change)
8141 tem = force_fit_type_double (type, tree_to_double_int (and1),
8142 0, TREE_OVERFLOW (and1));
8143 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8144 fold_convert_loc (loc, type, and0), tem);
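/* E.g. for int x, (unsigned short) (x & 0xff) becomes
   (unsigned short) x & 0xff here. Truncating casts always commute
   with the AND; for widening casts, the sign-bit test on the
   constant above guarantees that sign extension cannot bring extra
   bits under the mask.  */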
8148 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8149 when one of the new casts will fold away. Conservatively we assume
8150 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8151 if (POINTER_TYPE_P (type)
8152 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8153 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8154 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8155 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8156 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8158 tree arg00 = TREE_OPERAND (arg0, 0);
8159 tree arg01 = TREE_OPERAND (arg0, 1);
8161 return fold_build_pointer_plus_loc
8162 (loc, fold_convert_loc (loc, type, arg00), arg01);
8165 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8166 of the same precision, and X is an integer type not narrower than
8167 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8168 if (INTEGRAL_TYPE_P (type)
8169 && TREE_CODE (op0) == BIT_NOT_EXPR
8170 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8171 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8172 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8174 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8175 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8176 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8177 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8178 fold_convert_loc (loc, type, tem));
8181 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8182 type of X and Y (integer types only). */
8183 if (INTEGRAL_TYPE_P (type)
8184 && TREE_CODE (op0) == MULT_EXPR
8185 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8186 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8188 /* Be careful not to introduce new overflows. */
8189 tree mult_type;
8190 if (TYPE_OVERFLOW_WRAPS (type))
8191 mult_type = type;
8192 else
8193 mult_type = unsigned_type_for (type);
8195 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8197 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8198 fold_convert_loc (loc, mult_type,
8199 TREE_OPERAND (op0, 0)),
8200 fold_convert_loc (loc, mult_type,
8201 TREE_OPERAND (op0, 1)));
8202 return fold_convert_loc (loc, type, tem);
8206 tem = fold_convert_const (code, type, arg0);
8207 return tem ? tem : NULL_TREE;
8209 case ADDR_SPACE_CONVERT_EXPR:
8210 if (integer_zerop (arg0))
8211 return fold_convert_const (code, type, arg0);
8212 return NULL_TREE;
8214 case FIXED_CONVERT_EXPR:
8215 tem = fold_convert_const (code, type, arg0);
8216 return tem ? tem : NULL_TREE;
8218 case VIEW_CONVERT_EXPR:
8219 if (TREE_TYPE (op0) == type)
8220 return op0;
8221 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8222 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8223 type, TREE_OPERAND (op0, 0));
8224 if (TREE_CODE (op0) == MEM_REF)
8225 return fold_build2_loc (loc, MEM_REF, type,
8226 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8228 /* For integral conversions with the same precision or pointer
8229 conversions use a NOP_EXPR instead. */
8230 if ((INTEGRAL_TYPE_P (type)
8231 || POINTER_TYPE_P (type))
8232 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8233 || POINTER_TYPE_P (TREE_TYPE (op0)))
8234 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8235 return fold_convert_loc (loc, type, op0);
8237 /* Strip inner integral conversions that do not change the precision. */
8238 if (CONVERT_EXPR_P (op0)
8239 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8240 || POINTER_TYPE_P (TREE_TYPE (op0)))
8241 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8242 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8243 && (TYPE_PRECISION (TREE_TYPE (op0))
8244 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8245 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8246 type, TREE_OPERAND (op0, 0));
8248 return fold_view_convert_expr (type, op0);
8250 case NEGATE_EXPR:
8251 tem = fold_negate_expr (loc, arg0);
8252 if (tem)
8253 return fold_convert_loc (loc, type, tem);
8254 return NULL_TREE;
8256 case ABS_EXPR:
8257 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8258 return fold_abs_const (arg0, type);
8259 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8260 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8261 /* Convert fabs((double)float) into (double)fabsf(float). */
8262 else if (TREE_CODE (arg0) == NOP_EXPR
8263 && TREE_CODE (type) == REAL_TYPE)
8265 tree targ0 = strip_float_extensions (arg0);
8266 if (targ0 != arg0)
8267 return fold_convert_loc (loc, type,
8268 fold_build1_loc (loc, ABS_EXPR,
8269 TREE_TYPE (targ0),
8270 targ0));
8272 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8273 else if (TREE_CODE (arg0) == ABS_EXPR)
8274 return arg0;
8275 else if (tree_expr_nonnegative_p (arg0))
8276 return arg0;
8278 /* Strip sign ops from argument. */
8279 if (TREE_CODE (type) == REAL_TYPE)
8281 tem = fold_strip_sign_ops (arg0);
8282 if (tem)
8283 return fold_build1_loc (loc, ABS_EXPR, type,
8284 fold_convert_loc (loc, type, tem));
8286 return NULL_TREE;
8288 case CONJ_EXPR:
8289 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8290 return fold_convert_loc (loc, type, arg0);
8291 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8293 tree itype = TREE_TYPE (type);
8294 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8295 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8296 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8297 negate_expr (ipart));
8299 if (TREE_CODE (arg0) == COMPLEX_CST)
8301 tree itype = TREE_TYPE (type);
8302 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8303 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8304 return build_complex (type, rpart, negate_expr (ipart));
8306 if (TREE_CODE (arg0) == CONJ_EXPR)
8307 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8308 return NULL_TREE;
8310 case BIT_NOT_EXPR:
8311 if (TREE_CODE (arg0) == INTEGER_CST)
8312 return fold_not_const (arg0, type);
8313 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8314 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8315 /* Convert ~ (-A) to A - 1. */
8316 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8317 return fold_build2_loc (loc, MINUS_EXPR, type,
8318 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8319 build_int_cst (type, 1));
8320 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8321 else if (INTEGRAL_TYPE_P (type)
8322 && ((TREE_CODE (arg0) == MINUS_EXPR
8323 && integer_onep (TREE_OPERAND (arg0, 1)))
8324 || (TREE_CODE (arg0) == PLUS_EXPR
8325 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8327 /* Perform the negation in ARG0's type and only then convert
8328 to TYPE, so as to avoid introducing undefined behavior. */
8329 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8330 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8331 TREE_OPERAND (arg0, 0));
8332 return fold_convert_loc (loc, type, t);
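/* Both folds above are instances of the two's complement identity
   ~X == -X - 1: e.g. ~(-a) == a - 1, and
   ~(a - 1) == -(a - 1) - 1 == -a.  */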
8334 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8335 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8336 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8337 fold_convert_loc (loc, type,
8338 TREE_OPERAND (arg0, 0)))))
8339 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8340 fold_convert_loc (loc, type,
8341 TREE_OPERAND (arg0, 1)));
8342 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8343 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8344 fold_convert_loc (loc, type,
8345 TREE_OPERAND (arg0, 1)))))
8346 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8347 fold_convert_loc (loc, type,
8348 TREE_OPERAND (arg0, 0)), tem);
8349 /* Perform BIT_NOT_EXPR on each element individually. */
8350 else if (TREE_CODE (arg0) == VECTOR_CST)
8352 tree *elements;
8353 tree elem;
8354 unsigned count = VECTOR_CST_NELTS (arg0), i;
8356 elements = XALLOCAVEC (tree, count);
8357 for (i = 0; i < count; i++)
8359 elem = VECTOR_CST_ELT (arg0, i);
8360 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8361 if (elem == NULL_TREE)
8362 break;
8363 elements[i] = elem;
8365 if (i == count)
8366 return build_vector (type, elements);
8368 else if (COMPARISON_CLASS_P (arg0)
8369 && (VECTOR_TYPE_P (type)
8370 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8372 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8373 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8374 HONOR_NANS (TYPE_MODE (op_type)));
8375 if (subcode != ERROR_MARK)
8376 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8377 TREE_OPERAND (arg0, 1));
8381 return NULL_TREE;
8383 case TRUTH_NOT_EXPR:
8384 /* Note that the operand of this must be an int
8385 and its values must be 0 or 1.
8386 ("true" is a fixed value perhaps depending on the language,
8387 but we don't handle values other than 1 correctly yet.) */
8388 tem = fold_truth_not_expr (loc, arg0);
8389 if (!tem)
8390 return NULL_TREE;
8391 return fold_convert_loc (loc, type, tem);
8393 case REALPART_EXPR:
8394 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8395 return fold_convert_loc (loc, type, arg0);
8396 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8397 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8398 TREE_OPERAND (arg0, 1));
8399 if (TREE_CODE (arg0) == COMPLEX_CST)
8400 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8401 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8403 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8404 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8405 fold_build1_loc (loc, REALPART_EXPR, itype,
8406 TREE_OPERAND (arg0, 0)),
8407 fold_build1_loc (loc, REALPART_EXPR, itype,
8408 TREE_OPERAND (arg0, 1)));
8409 return fold_convert_loc (loc, type, tem);
8411 if (TREE_CODE (arg0) == CONJ_EXPR)
8413 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8414 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8415 TREE_OPERAND (arg0, 0));
8416 return fold_convert_loc (loc, type, tem);
8418 if (TREE_CODE (arg0) == CALL_EXPR)
8420 tree fn = get_callee_fndecl (arg0);
8421 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8422 switch (DECL_FUNCTION_CODE (fn))
8424 CASE_FLT_FN (BUILT_IN_CEXPI):
8425 fn = mathfn_built_in (type, BUILT_IN_COS);
8426 if (fn)
8427 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8428 break;
8430 default:
8431 break;
8434 return NULL_TREE;
8436 case IMAGPART_EXPR:
8437 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8438 return build_zero_cst (type);
8439 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8440 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8441 TREE_OPERAND (arg0, 0));
8442 if (TREE_CODE (arg0) == COMPLEX_CST)
8443 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8444 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8446 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8447 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8448 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8449 TREE_OPERAND (arg0, 0)),
8450 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8451 TREE_OPERAND (arg0, 1)));
8452 return fold_convert_loc (loc, type, tem);
8454 if (TREE_CODE (arg0) == CONJ_EXPR)
8456 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8457 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8458 return fold_convert_loc (loc, type, negate_expr (tem));
8460 if (TREE_CODE (arg0) == CALL_EXPR)
8462 tree fn = get_callee_fndecl (arg0);
8463 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8464 switch (DECL_FUNCTION_CODE (fn))
8466 CASE_FLT_FN (BUILT_IN_CEXPI):
8467 fn = mathfn_built_in (type, BUILT_IN_SIN);
8468 if (fn)
8469 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8470 break;
8472 default:
8473 break;
8476 return NULL_TREE;
8478 case INDIRECT_REF:
8479 /* Fold *&X to X if X is an lvalue. */
8480 if (TREE_CODE (op0) == ADDR_EXPR)
8482 tree op00 = TREE_OPERAND (op0, 0);
8483 if ((TREE_CODE (op00) == VAR_DECL
8484 || TREE_CODE (op00) == PARM_DECL
8485 || TREE_CODE (op00) == RESULT_DECL)
8486 && !TREE_READONLY (op00))
8487 return op00;
8489 return NULL_TREE;
8491 case VEC_UNPACK_LO_EXPR:
8492 case VEC_UNPACK_HI_EXPR:
8493 case VEC_UNPACK_FLOAT_LO_EXPR:
8494 case VEC_UNPACK_FLOAT_HI_EXPR:
8496 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8497 tree *elts;
8498 enum tree_code subcode;
8500 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8501 if (TREE_CODE (arg0) != VECTOR_CST)
8502 return NULL_TREE;
8504 elts = XALLOCAVEC (tree, nelts * 2);
8505 if (!vec_cst_ctor_to_array (arg0, elts))
8506 return NULL_TREE;
8508 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8509 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8510 elts += nelts;
8512 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8513 subcode = NOP_EXPR;
8514 else
8515 subcode = FLOAT_EXPR;
8517 for (i = 0; i < nelts; i++)
8519 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8520 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8521 return NULL_TREE;
8524 return build_vector (type, elts);
8527 case REDUC_MIN_EXPR:
8528 case REDUC_MAX_EXPR:
8529 case REDUC_PLUS_EXPR:
8531 unsigned int nelts, i;
8532 tree *elts;
8533 enum tree_code subcode;
8535 if (TREE_CODE (op0) != VECTOR_CST)
8536 return NULL_TREE;
8537 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8539 elts = XALLOCAVEC (tree, nelts);
8540 if (!vec_cst_ctor_to_array (op0, elts))
8541 return NULL_TREE;
8543 switch (code)
8545 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8546 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8547 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8548 default: gcc_unreachable ();
8551 for (i = 1; i < nelts; i++)
8553 elts[0] = const_binop (subcode, elts[0], elts[i]);
8554 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8555 return NULL_TREE;
8558 return elts[0];
8561 default:
8562 return NULL_TREE;
8563 } /* switch (code) */
8567 /* If the operation was a conversion, do _not_ mark a resulting constant
8568 with TREE_OVERFLOW if the original constant was not. These conversions
8569 have implementation-defined behavior and retaining the TREE_OVERFLOW
8570 flag here would confuse later passes such as VRP. */
8571 tree
8572 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8573 tree type, tree op0)
8575 tree res = fold_unary_loc (loc, code, type, op0);
8576 if (res
8577 && TREE_CODE (res) == INTEGER_CST
8578 && TREE_CODE (op0) == INTEGER_CST
8579 && CONVERT_EXPR_CODE_P (code))
8580 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8582 return res;
8585 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8586 operands OP0 and OP1. LOC is the location of the resulting expression.
8587 ARG0 and ARG1 are the NOP-stripped versions of OP0 and OP1.
8588 Return the folded expression if folding is successful. Otherwise,
8589 return NULL_TREE. */
8590 static tree
8591 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8592 tree arg0, tree arg1, tree op0, tree op1)
8594 tree tem;
8596 /* We only do these simplifications if we are optimizing. */
8597 if (!optimize)
8598 return NULL_TREE;
8600 /* Check for things like (A || B) && (A || C). We can convert this
8601 to A || (B && C). Note that either operator can be any of the four
8602 truth and/or operations and the transformation will still be
8603 valid. Also note that we only care about order for the
8604 ANDIF and ORIF operators. If B contains side effects, this
8605 might change the truth-value of A. */
8606 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8607 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8608 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8609 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8610 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8611 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8613 tree a00 = TREE_OPERAND (arg0, 0);
8614 tree a01 = TREE_OPERAND (arg0, 1);
8615 tree a10 = TREE_OPERAND (arg1, 0);
8616 tree a11 = TREE_OPERAND (arg1, 1);
8617 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8618 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8619 && (code == TRUTH_AND_EXPR
8620 || code == TRUTH_OR_EXPR));
8622 if (operand_equal_p (a00, a10, 0))
8623 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8624 fold_build2_loc (loc, code, type, a01, a11));
8625 else if (commutative && operand_equal_p (a00, a11, 0))
8626 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8627 fold_build2_loc (loc, code, type, a01, a10));
8628 else if (commutative && operand_equal_p (a01, a10, 0))
8629 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8630 fold_build2_loc (loc, code, type, a00, a11));
8632 /* This case is tricky because we must either have commutative
8633 operators or else A10 must not have side-effects. */
8635 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8636 && operand_equal_p (a01, a11, 0))
8637 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8638 fold_build2_loc (loc, code, type, a00, a10),
8639 a01);
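/* E.g. (A || B) && (A || C) folds above to A || (B && C); for the
   non-short-circuit TRUTH_AND/TRUTH_OR forms, the commutative
   variants such as (A OR B) AND (C OR A) are matched as well.  */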
8642 /* See if we can build a range comparison. */
8643 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8644 return tem;
8646 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8647 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8649 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8650 if (tem)
8651 return fold_build2_loc (loc, code, type, tem, arg1);
8654 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8655 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8657 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8658 if (tem)
8659 return fold_build2_loc (loc, code, type, arg0, tem);
8662 /* Check for the possibility of merging component references. If our
8663 lhs is another similar operation, try to merge its rhs with our
8664 rhs. Then try to merge our lhs and rhs. */
8665 if (TREE_CODE (arg0) == code
8666 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8667 TREE_OPERAND (arg0, 1), arg1)))
8668 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8670 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8671 return tem;
8673 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8674 && (code == TRUTH_AND_EXPR
8675 || code == TRUTH_ANDIF_EXPR
8676 || code == TRUTH_OR_EXPR
8677 || code == TRUTH_ORIF_EXPR))
8679 enum tree_code ncode, icode;
8681 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8682 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8683 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8685 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8686 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8687 We don't want to pack more than two leaves into a non-IF AND/OR
8688 expression.
8689 If the tree code of the left-hand operand isn't an AND/OR-IF code
8690 and isn't equal to IF-CODE, we don't want to add the right-hand operand.
8691 If the inner right-hand side of the left-hand operand has
8692 side-effects, or isn't simple, we can't add to it,
8693 as otherwise we might destroy the if-sequence. */
8694 if (TREE_CODE (arg0) == icode
8695 && simple_operand_p_2 (arg1)
8696 /* Needed for sequence points, to handle trapping and
8697 side-effects. */
8698 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8700 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8701 arg1);
8702 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8703 tem);
8705 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8706 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8707 else if (TREE_CODE (arg1) == icode
8708 && simple_operand_p_2 (arg0)
8710 /* Needed for sequence points, to handle trapping and
8710 side-effects. */
8711 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8713 tem = fold_build2_loc (loc, ncode, type,
8714 arg0, TREE_OPERAND (arg1, 0));
8715 return fold_build2_loc (loc, icode, type, tem,
8716 TREE_OPERAND (arg1, 1));
8718 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8719 into (A OR B).
8720 For sequence point consistency, we need to check for trapping
8721 and side-effects. */
8722 else if (code == icode && simple_operand_p_2 (arg0)
8723 && simple_operand_p_2 (arg1))
8724 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8727 return NULL_TREE;
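/* E.g. when LOGICAL_OP_NON_SHORT_CIRCUIT holds, ((a AND-IF b) AND-IF c)
   re-associates above into (a AND-IF (b AND c)), and (a AND-IF b) alone
   becomes (a AND b), provided the operands involved are simple and free
   of traps and side-effects; this trades unconditional evaluation of
   one operand for fewer branches.  */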
8730 /* Fold a binary expression of code CODE and type TYPE with operands
8731 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8732 Return the folded expression if folding is successful. Otherwise,
8733 return NULL_TREE. */
8735 static tree
8736 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8738 enum tree_code compl_code;
8740 if (code == MIN_EXPR)
8741 compl_code = MAX_EXPR;
8742 else if (code == MAX_EXPR)
8743 compl_code = MIN_EXPR;
8744 else
8745 gcc_unreachable ();
8747 /* MIN (MAX (a, b), b) == b. */
8748 if (TREE_CODE (op0) == compl_code
8749 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8750 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8752 /* MIN (MAX (b, a), b) == b. */
8753 if (TREE_CODE (op0) == compl_code
8754 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8755 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8756 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8758 /* MIN (a, MAX (a, b)) == a. */
8759 if (TREE_CODE (op1) == compl_code
8760 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8761 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8762 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8764 /* MIN (a, MAX (b, a)) == a. */
8765 if (TREE_CODE (op1) == compl_code
8766 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8767 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8768 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8770 return NULL_TREE;
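/* E.g. MIN (MAX (x, lo), lo) folds to lo: whatever MAX (x, lo)
   produces is at least LO, so the outer MIN must pick LO.
   omit_one_operand_loc keeps any side effects of the discarded
   operand.  */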
8773 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8774 by changing CODE to reduce the magnitude of constants involved in
8775 ARG0 of the comparison.
8776 Returns a canonicalized comparison tree if a simplification was
8777 possible, otherwise returns NULL_TREE.
8778 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8779 valid if signed overflow is undefined. */
8781 static tree
8782 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8783 tree arg0, tree arg1,
8784 bool *strict_overflow_p)
8786 enum tree_code code0 = TREE_CODE (arg0);
8787 tree t, cst0 = NULL_TREE;
8788 int sgn0;
8789 bool swap = false;
8791 /* Match A +- CST code arg1 and CST code arg1. We can change the
8792 first form only if overflow is undefined. */
8793 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8794 /* In principle pointers also have undefined overflow behavior,
8795 but that causes problems elsewhere. */
8796 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8797 && (code0 == MINUS_EXPR
8798 || code0 == PLUS_EXPR)
8799 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8800 || code0 == INTEGER_CST))
8801 return NULL_TREE;
8803 /* Identify the constant in arg0 and its sign. */
8804 if (code0 == INTEGER_CST)
8805 cst0 = arg0;
8806 else
8807 cst0 = TREE_OPERAND (arg0, 1);
8808 sgn0 = tree_int_cst_sgn (cst0);
8810 /* Overflowed constants and zero will cause problems. */
8811 if (integer_zerop (cst0)
8812 || TREE_OVERFLOW (cst0))
8813 return NULL_TREE;
8815 /* See if we can reduce the magnitude of the constant in
8816 arg0 by changing the comparison code. */
8817 if (code0 == INTEGER_CST)
8819 /* CST <= arg1 -> CST-1 < arg1. */
8820 if (code == LE_EXPR && sgn0 == 1)
8821 code = LT_EXPR;
8822 /* -CST < arg1 -> -CST-1 <= arg1. */
8823 else if (code == LT_EXPR && sgn0 == -1)
8824 code = LE_EXPR;
8825 /* CST > arg1 -> CST-1 >= arg1. */
8826 else if (code == GT_EXPR && sgn0 == 1)
8827 code = GE_EXPR;
8828 /* -CST >= arg1 -> -CST-1 > arg1. */
8829 else if (code == GE_EXPR && sgn0 == -1)
8830 code = GT_EXPR;
8831 else
8832 return NULL_TREE;
8833 /* arg1 code' CST' might be more canonical. */
8834 swap = true;
8836 else
8838 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8839 if (code == LT_EXPR
8840 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8841 code = LE_EXPR;
8842 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8843 else if (code == GT_EXPR
8844 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8845 code = GE_EXPR;
8846 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8847 else if (code == LE_EXPR
8848 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8849 code = LT_EXPR;
8850 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8851 else if (code == GE_EXPR
8852 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8853 code = GT_EXPR;
8854 else
8855 return NULL_TREE;
8856 *strict_overflow_p = true;
8859 /* Now build the constant reduced in magnitude. But not if that
8860 would produce one outside of its type's range. */
8861 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8862 && ((sgn0 == 1
8863 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8864 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8865 || (sgn0 == -1
8866 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8867 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8868 /* We cannot swap the comparison here as that would cause us to
8869 endlessly recurse. */
8870 return NULL_TREE;
8872 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8873 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8874 if (code0 != INTEGER_CST)
8875 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8876 t = fold_convert (TREE_TYPE (arg1), t);
8878 /* If swapping might yield a more canonical form, do so. */
8879 if (swap)
8880 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8881 else
8882 return fold_build2_loc (loc, code, type, t, arg1);
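/* E.g. for signed x, x - 5 < y is canonicalized here to x - 4 <= y
   (valid only because signed overflow is taken to be undefined),
   and the constant-only form 5 <= y becomes 4 < y and is then
   swapped to y > 4.  */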
8885 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8886 overflow further. Try to decrease the magnitude of constants involved
8887 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8888 and put sole constants at the second argument position.
8889 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8891 static tree
8892 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8893 tree arg0, tree arg1)
8895 tree t;
8896 bool strict_overflow_p;
8897 const char * const warnmsg = G_("assuming signed overflow does not occur "
8898 "when reducing constant in comparison");
8900 /* Try canonicalization by simplifying arg0. */
8901 strict_overflow_p = false;
8902 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8903 &strict_overflow_p);
8904 if (t)
8906 if (strict_overflow_p)
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8908 return t;
8911 /* Try canonicalization by simplifying arg1 using the swapped
8912 comparison. */
8913 code = swap_tree_comparison (code);
8914 strict_overflow_p = false;
8915 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8916 &strict_overflow_p);
8917 if (t && strict_overflow_p)
8918 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8919 return t;
8922 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8923 space. This is used to avoid issuing overflow warnings for
8924 expressions like &p->x which cannot wrap. */
8926 static bool
8927 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8929 double_int di_offset, total;
8931 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8932 return true;
8934 if (bitpos < 0)
8935 return true;
8937 if (offset == NULL_TREE)
8938 di_offset = double_int_zero;
8939 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8940 return true;
8941 else
8942 di_offset = TREE_INT_CST (offset);
8944 bool overflow;
8945 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8946 total = di_offset.add_with_sign (units, true, &overflow);
8947 if (overflow)
8948 return true;
8950 if (total.high != 0)
8951 return true;
8953 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8954 if (size <= 0)
8955 return true;
8957 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8958 array. */
8959 if (TREE_CODE (base) == ADDR_EXPR)
8961 HOST_WIDE_INT base_size;
8963 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8964 if (base_size > 0 && size < base_size)
8965 size = base_size;
8968 return total.low > (unsigned HOST_WIDE_INT) size;
8971 /* Return the least significant HOST_WIDE_INT bits of T, an INTEGER_CST
8972 of sizetype kind. This makes sure to properly sign-extend the
8973 constant. */
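/* For instance (illustrative): on a host with a 64-bit HOST_WIDE_INT, a
   32-bit sizetype constant with all bits set is returned as -1 rather
   than 4294967295, because the value is sign-extended from the 32-bit
   precision of its type.  */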
8975 static HOST_WIDE_INT
8976 size_low_cst (const_tree t)
8978 double_int d = tree_to_double_int (t);
8979 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8982 /* Subroutine of fold_binary. This routine performs all of the
8983 transformations that are common to the equality/inequality
8984 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8985 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8986 fold_binary itself should call fold_binary, not this routine directly. Fold a comparison with
8987 tree code CODE and type TYPE with operands OP0 and OP1. Return
8988 the folded comparison or NULL_TREE. */
8990 static tree
8991 fold_comparison (location_t loc, enum tree_code code, tree type,
8992 tree op0, tree op1)
8994 tree arg0, arg1, tem;
8996 arg0 = op0;
8997 arg1 = op1;
8999 STRIP_SIGN_NOPS (arg0);
9000 STRIP_SIGN_NOPS (arg1);
9002 tem = fold_relational_const (code, type, arg0, arg1);
9003 if (tem != NULL_TREE)
9004 return tem;
9006 /* If one arg is a real or integer constant, put it last. */
9007 if (tree_swap_operands_p (arg0, arg1, true))
9008 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9010 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
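/* For example (illustrative, valid when signed overflow is undefined):
   "X + 7 < 10" becomes "X < 3", since 10 - 7 can be computed without
   overflow.  */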
9011 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9012 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9013 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9014 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9015 && (TREE_CODE (arg1) == INTEGER_CST
9016 && !TREE_OVERFLOW (arg1)))
9018 tree const1 = TREE_OPERAND (arg0, 1);
9019 tree const2 = arg1;
9020 tree variable = TREE_OPERAND (arg0, 0);
9021 tree lhs;
9022 int lhs_add;
9023 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9025 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9026 TREE_TYPE (arg1), const2, const1);
9028 /* If the constant operation overflowed this can be
9029 simplified as a comparison against INT_MAX/INT_MIN. */
9030 if (TREE_CODE (lhs) == INTEGER_CST
9031 && TREE_OVERFLOW (lhs)
9032 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9034 int const1_sgn = tree_int_cst_sgn (const1);
9035 enum tree_code code2 = code;
9037 /* Get the sign the constant would have on the lhs if the
9038 operation were written as VARIABLE + CONST1. */
9039 if (TREE_CODE (arg0) == MINUS_EXPR)
9040 const1_sgn = -const1_sgn;
9042 /* The sign of the constant determines if we overflowed
9043 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9044 Canonicalize to the INT_MIN overflow by swapping the comparison
9045 if necessary. */
9046 if (const1_sgn == -1)
9047 code2 = swap_tree_comparison (code);
9049 /* We now can look at the canonicalized case
9050 VARIABLE + 1 CODE2 INT_MIN
9051 and decide on the result. */
9052 if (code2 == LT_EXPR
9053 || code2 == LE_EXPR
9054 || code2 == EQ_EXPR)
9055 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9056 else if (code2 == NE_EXPR
9057 || code2 == GE_EXPR
9058 || code2 == GT_EXPR)
9059 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9062 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9063 && (TREE_CODE (lhs) != INTEGER_CST
9064 || !TREE_OVERFLOW (lhs)))
9066 if (code != EQ_EXPR && code != NE_EXPR)
9067 fold_overflow_warning ("assuming signed overflow does not occur "
9068 "when changing X +- C1 cmp C2 to "
9069 "X cmp C1 +- C2",
9070 WARN_STRICT_OVERFLOW_COMPARISON);
9071 return fold_build2_loc (loc, code, type, variable, lhs);
9075 /* For comparisons of pointers we can decompose it to a compile time
9076 comparison of the base objects and the offsets into the object.
9077 This requires at least one operand being an ADDR_EXPR or a
9078 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
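/* Illustrative example: for "&a[1] == &a[2]" both operands decompose to
   the common base "a" with differing constant bit positions, so the
   comparison folds to false below.  */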
9079 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9080 && (TREE_CODE (arg0) == ADDR_EXPR
9081 || TREE_CODE (arg1) == ADDR_EXPR
9082 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9083 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9085 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9086 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9087 enum machine_mode mode;
9088 int volatilep, unsignedp;
9089 bool indirect_base0 = false, indirect_base1 = false;
9091 /* Get base and offset for the access. Strip ADDR_EXPR for
9092 get_inner_reference, but put it back by stripping INDIRECT_REF
9093 off the base object if possible. indirect_baseN will be true
9094 if baseN is not an address but refers to the object itself. */
9095 base0 = arg0;
9096 if (TREE_CODE (arg0) == ADDR_EXPR)
9098 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9099 &bitsize, &bitpos0, &offset0, &mode,
9100 &unsignedp, &volatilep, false);
9101 if (TREE_CODE (base0) == INDIRECT_REF)
9102 base0 = TREE_OPERAND (base0, 0);
9103 else
9104 indirect_base0 = true;
9106 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9108 base0 = TREE_OPERAND (arg0, 0);
9109 STRIP_SIGN_NOPS (base0);
9110 if (TREE_CODE (base0) == ADDR_EXPR)
9112 base0 = TREE_OPERAND (base0, 0);
9113 indirect_base0 = true;
9115 offset0 = TREE_OPERAND (arg0, 1);
9116 if (tree_fits_shwi_p (offset0))
9118 HOST_WIDE_INT off = size_low_cst (offset0);
9119 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9120 * BITS_PER_UNIT)
9121 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9123 bitpos0 = off * BITS_PER_UNIT;
9124 offset0 = NULL_TREE;
9129 base1 = arg1;
9130 if (TREE_CODE (arg1) == ADDR_EXPR)
9132 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9133 &bitsize, &bitpos1, &offset1, &mode,
9134 &unsignedp, &volatilep, false);
9135 if (TREE_CODE (base1) == INDIRECT_REF)
9136 base1 = TREE_OPERAND (base1, 0);
9137 else
9138 indirect_base1 = true;
9140 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9142 base1 = TREE_OPERAND (arg1, 0);
9143 STRIP_SIGN_NOPS (base1);
9144 if (TREE_CODE (base1) == ADDR_EXPR)
9146 base1 = TREE_OPERAND (base1, 0);
9147 indirect_base1 = true;
9149 offset1 = TREE_OPERAND (arg1, 1);
9150 if (tree_fits_shwi_p (offset1))
9152 HOST_WIDE_INT off = size_low_cst (offset1);
9153 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9154 * BITS_PER_UNIT)
9155 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9157 bitpos1 = off * BITS_PER_UNIT;
9158 offset1 = NULL_TREE;
9163 /* A local variable can never be pointed to by
9164 the default SSA name of an incoming parameter. */
9165 if ((TREE_CODE (arg0) == ADDR_EXPR
9166 && indirect_base0
9167 && TREE_CODE (base0) == VAR_DECL
9168 && auto_var_in_fn_p (base0, current_function_decl)
9169 && !indirect_base1
9170 && TREE_CODE (base1) == SSA_NAME
9171 && SSA_NAME_IS_DEFAULT_DEF (base1)
9172 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9173 || (TREE_CODE (arg1) == ADDR_EXPR
9174 && indirect_base1
9175 && TREE_CODE (base1) == VAR_DECL
9176 && auto_var_in_fn_p (base1, current_function_decl)
9177 && !indirect_base0
9178 && TREE_CODE (base0) == SSA_NAME
9179 && SSA_NAME_IS_DEFAULT_DEF (base0)
9180 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9182 if (code == NE_EXPR)
9183 return constant_boolean_node (1, type);
9184 else if (code == EQ_EXPR)
9185 return constant_boolean_node (0, type);
9187 /* If we have equivalent bases we might be able to simplify. */
9188 else if (indirect_base0 == indirect_base1
9189 && operand_equal_p (base0, base1, 0))
9191 /* We can fold this expression to a constant if the non-constant
9192 offset parts are equal. */
9193 if ((offset0 == offset1
9194 || (offset0 && offset1
9195 && operand_equal_p (offset0, offset1, 0)))
9196 && (code == EQ_EXPR
9197 || code == NE_EXPR
9198 || (indirect_base0 && DECL_P (base0))
9199 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9202 if (code != EQ_EXPR
9203 && code != NE_EXPR
9204 && bitpos0 != bitpos1
9205 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9206 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9207 fold_overflow_warning (("assuming pointer wraparound does not "
9208 "occur when comparing P +- C1 with "
9209 "P +- C2"),
9210 WARN_STRICT_OVERFLOW_CONDITIONAL);
9212 switch (code)
9214 case EQ_EXPR:
9215 return constant_boolean_node (bitpos0 == bitpos1, type);
9216 case NE_EXPR:
9217 return constant_boolean_node (bitpos0 != bitpos1, type);
9218 case LT_EXPR:
9219 return constant_boolean_node (bitpos0 < bitpos1, type);
9220 case LE_EXPR:
9221 return constant_boolean_node (bitpos0 <= bitpos1, type);
9222 case GE_EXPR:
9223 return constant_boolean_node (bitpos0 >= bitpos1, type);
9224 case GT_EXPR:
9225 return constant_boolean_node (bitpos0 > bitpos1, type);
9226 default:;
9229 /* We can simplify the comparison to a comparison of the variable
9230 offset parts if the constant offset parts are equal.
9231 Be careful to use signed sizetype here because otherwise we
9232 mess with array offsets in the wrong way. This is possible
9233 because pointer arithmetic is restricted to remain within an
9234 object and overflow on pointer differences is undefined as of
9235 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
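/* Illustrative example: with equal constant parts, a comparison such as
   "&p->a[i] < &p->a[j]" reduces to a comparison of the variable offset
   parts converted to ssizetype, assuming pointer overflow is undefined.  */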
9236 else if (bitpos0 == bitpos1
9237 && ((code == EQ_EXPR || code == NE_EXPR)
9238 || (indirect_base0 && DECL_P (base0))
9239 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9241 /* By converting to signed sizetype we cover middle-end pointer
9242 arithmetic which operates on unsigned pointer types of size
9243 type size and ARRAY_REF offsets which are properly sign or
9244 zero extended from their type in case it is narrower than
9245 sizetype. */
9246 if (offset0 == NULL_TREE)
9247 offset0 = build_int_cst (ssizetype, 0);
9248 else
9249 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9250 if (offset1 == NULL_TREE)
9251 offset1 = build_int_cst (ssizetype, 0);
9252 else
9253 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9255 if (code != EQ_EXPR
9256 && code != NE_EXPR
9257 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9258 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9259 fold_overflow_warning (("assuming pointer wraparound does not "
9260 "occur when comparing P +- C1 with "
9261 "P +- C2"),
9262 WARN_STRICT_OVERFLOW_COMPARISON);
9264 return fold_build2_loc (loc, code, type, offset0, offset1);
9267 /* For non-equal bases we can simplify if they are addresses
9268 of local binding decls or constants. */
9269 else if (indirect_base0 && indirect_base1
9270 /* We know that !operand_equal_p (base0, base1, 0)
9271 because the if condition was false. But make
9272 sure the two decls are not the same. */
9273 && base0 != base1
9274 && TREE_CODE (arg0) == ADDR_EXPR
9275 && TREE_CODE (arg1) == ADDR_EXPR
9276 && (((TREE_CODE (base0) == VAR_DECL
9277 || TREE_CODE (base0) == PARM_DECL)
9278 && (targetm.binds_local_p (base0)
9279 || CONSTANT_CLASS_P (base1)))
9280 || CONSTANT_CLASS_P (base0))
9281 && (((TREE_CODE (base1) == VAR_DECL
9282 || TREE_CODE (base1) == PARM_DECL)
9283 && (targetm.binds_local_p (base1)
9284 || CONSTANT_CLASS_P (base0)))
9285 || CONSTANT_CLASS_P (base1)))
9287 if (code == EQ_EXPR)
9288 return omit_two_operands_loc (loc, type, boolean_false_node,
9289 arg0, arg1);
9290 else if (code == NE_EXPR)
9291 return omit_two_operands_loc (loc, type, boolean_true_node,
9292 arg0, arg1);
9294 /* For equal offsets we can simplify to a comparison of the
9295 base addresses. */
9296 else if (bitpos0 == bitpos1
9297 && (indirect_base0
9298 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9299 && (indirect_base1
9300 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9301 && ((offset0 == offset1)
9302 || (offset0 && offset1
9303 && operand_equal_p (offset0, offset1, 0))))
9305 if (indirect_base0)
9306 base0 = build_fold_addr_expr_loc (loc, base0);
9307 if (indirect_base1)
9308 base1 = build_fold_addr_expr_loc (loc, base1);
9309 return fold_build2_loc (loc, code, type, base0, base1);
9313 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9314 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9315 the resulting offset is smaller in absolute value than the
9316 original one and has the same sign. */
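/* A worked example (illustrative): for signed X and Y, "X + 5 < Y + 3" is
   rewritten as "X + 2 < Y"; the combined constant 2 == 5 - 3 is smaller in
   magnitude than 5 and has the same sign, so the transformation is valid
   under undefined signed overflow.  */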
9317 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9318 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9319 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9320 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9321 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9322 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9323 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9325 tree const1 = TREE_OPERAND (arg0, 1);
9326 tree const2 = TREE_OPERAND (arg1, 1);
9327 tree variable1 = TREE_OPERAND (arg0, 0);
9328 tree variable2 = TREE_OPERAND (arg1, 0);
9329 tree cst;
9330 const char * const warnmsg = G_("assuming signed overflow does not "
9331 "occur when combining constants around "
9332 "a comparison");
9334 /* Put the constant on the side where it doesn't overflow and is
9335 of lower absolute value and of the same sign as before. */
9336 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9337 ? MINUS_EXPR : PLUS_EXPR,
9338 const2, const1);
9339 if (!TREE_OVERFLOW (cst)
9340 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9341 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9344 return fold_build2_loc (loc, code, type,
9345 variable1,
9346 fold_build2_loc (loc, TREE_CODE (arg1),
9347 TREE_TYPE (arg1),
9348 variable2, cst));
9351 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9352 ? MINUS_EXPR : PLUS_EXPR,
9353 const1, const2);
9354 if (!TREE_OVERFLOW (cst)
9355 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9356 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9358 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9359 return fold_build2_loc (loc, code, type,
9360 fold_build2_loc (loc, TREE_CODE (arg0),
9361 TREE_TYPE (arg0),
9362 variable1, cst),
9363 variable2);
9367 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9368 signed arithmetic case. That form is created by the compiler
9369 often enough that folding it is worthwhile. One example is in
9370 computing loop trip counts after Operator Strength Reduction. */
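/* Illustrative examples: "X * 4 > 0" folds to "X > 0", and "X * -4 > 0"
   folds to "X < 0" (the comparison is swapped for a negative multiplier),
   both assuming signed overflow is undefined.  */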
9371 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9372 && TREE_CODE (arg0) == MULT_EXPR
9373 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9374 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9375 && integer_zerop (arg1))
9377 tree const1 = TREE_OPERAND (arg0, 1);
9378 tree const2 = arg1; /* zero */
9379 tree variable1 = TREE_OPERAND (arg0, 0);
9380 enum tree_code cmp_code = code;
9382 /* Handle unfolded multiplication by zero. */
9383 if (integer_zerop (const1))
9384 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9386 fold_overflow_warning (("assuming signed overflow does not occur when "
9387 "eliminating multiplication in comparison "
9388 "with zero"),
9389 WARN_STRICT_OVERFLOW_COMPARISON);
9391 /* If const1 is negative we swap the sense of the comparison. */
9392 if (tree_int_cst_sgn (const1) < 0)
9393 cmp_code = swap_tree_comparison (cmp_code);
9395 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9398 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9399 if (tem)
9400 return tem;
9402 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9404 tree targ0 = strip_float_extensions (arg0);
9405 tree targ1 = strip_float_extensions (arg1);
9406 tree newtype = TREE_TYPE (targ0);
9408 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9409 newtype = TREE_TYPE (targ1);
9411 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9412 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9413 return fold_build2_loc (loc, code, type,
9414 fold_convert_loc (loc, newtype, targ0),
9415 fold_convert_loc (loc, newtype, targ1));
9417 /* (-a) CMP (-b) -> b CMP a */
9418 if (TREE_CODE (arg0) == NEGATE_EXPR
9419 && TREE_CODE (arg1) == NEGATE_EXPR)
9420 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9421 TREE_OPERAND (arg0, 0));
9423 if (TREE_CODE (arg1) == REAL_CST)
9425 REAL_VALUE_TYPE cst;
9426 cst = TREE_REAL_CST (arg1);
9428 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9429 if (TREE_CODE (arg0) == NEGATE_EXPR)
9430 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9431 TREE_OPERAND (arg0, 0),
9432 build_real (TREE_TYPE (arg1),
9433 real_value_negate (&cst)));
9435 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9436 /* a CMP (-0) -> a CMP 0 */
9437 if (REAL_VALUE_MINUS_ZERO (cst))
9438 return fold_build2_loc (loc, code, type, arg0,
9439 build_real (TREE_TYPE (arg1), dconst0));
9441 /* x != NaN is always true, other ops are always false. */
9442 if (REAL_VALUE_ISNAN (cst)
9443 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9445 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9446 return omit_one_operand_loc (loc, type, tem, arg0);
9449 /* Fold comparisons against infinity. */
9450 if (REAL_VALUE_ISINF (cst)
9451 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9453 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9454 if (tem != NULL_TREE)
9455 return tem;
9459 /* If this is a comparison of a real constant with a PLUS_EXPR
9460 or a MINUS_EXPR of a real constant, we can convert it into a
9461 comparison with a revised real constant, provided that
9462 unsafe_math_optimizations are enabled and no overflow occurs. */
9463 if (flag_unsafe_math_optimizations
9464 && TREE_CODE (arg1) == REAL_CST
9465 && (TREE_CODE (arg0) == PLUS_EXPR
9466 || TREE_CODE (arg0) == MINUS_EXPR)
9467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9468 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9469 ? MINUS_EXPR : PLUS_EXPR,
9470 arg1, TREE_OPERAND (arg0, 1)))
9471 && !TREE_OVERFLOW (tem))
9472 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9474 /* Likewise, we can simplify a comparison of a real constant with
9475 a MINUS_EXPR whose first operand is also a real constant, i.e.
9476 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9477 floating-point types only if -fassociative-math is set. */
9478 if (flag_associative_math
9479 && TREE_CODE (arg1) == REAL_CST
9480 && TREE_CODE (arg0) == MINUS_EXPR
9481 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9482 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9483 arg1))
9484 && !TREE_OVERFLOW (tem))
9485 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9486 TREE_OPERAND (arg0, 1), tem);
9488 /* Fold comparisons against built-in math functions. */
9489 if (TREE_CODE (arg1) == REAL_CST
9490 && flag_unsafe_math_optimizations
9491 && ! flag_errno_math)
9493 enum built_in_function fcode = builtin_mathfn_code (arg0);
9495 if (fcode != END_BUILTINS)
9497 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9498 if (tem != NULL_TREE)
9499 return tem;
9504 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9505 && CONVERT_EXPR_P (arg0))
9507 /* If we are widening one operand of an integer comparison,
9508 see if the other operand is similarly being widened. Perhaps we
9509 can do the comparison in the narrower type. */
9510 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9511 if (tem)
9512 return tem;
9514 /* Or if we are changing signedness. */
9515 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9516 if (tem)
9517 return tem;
9520 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9521 constant, we can simplify it. */
9522 if (TREE_CODE (arg1) == INTEGER_CST
9523 && (TREE_CODE (arg0) == MIN_EXPR
9524 || TREE_CODE (arg0) == MAX_EXPR)
9525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9527 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9528 if (tem)
9529 return tem;
9532 /* Simplify comparison of something with itself. (For IEEE
9533 floating-point, we can only do some of these simplifications.) */
9534 if (operand_equal_p (arg0, arg1, 0))
9536 switch (code)
9538 case EQ_EXPR:
9539 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9540 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9541 return constant_boolean_node (1, type);
9542 break;
9544 case GE_EXPR:
9545 case LE_EXPR:
9546 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9547 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9548 return constant_boolean_node (1, type);
9549 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9551 case NE_EXPR:
9552 /* For NE, we can only do this simplification if the type is integer
9553 or we don't honor IEEE floating-point NaNs. */
9554 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9555 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9556 break;
9557 /* ... fall through ... */
9558 case GT_EXPR:
9559 case LT_EXPR:
9560 return constant_boolean_node (0, type);
9561 default:
9562 gcc_unreachable ();
9566 /* If we are comparing an expression that just has comparisons
9567 of two integer values, arithmetic expressions of those comparisons,
9568 and constants, we can simplify it. There are only three cases
9569 to check: the two values can either be equal, the first can be
9570 greater, or the second can be greater. Fold the expression for
9571 those three values. Since each value must be 0 or 1, we have
9572 eight possibilities, each of which corresponds to the constant 0
9573 or 1 or one of the six possible comparisons.
9575 This handles common cases like (a > b) == 0 but also handles
9576 expressions like ((x > y) - (y > x)) > 0, which supposedly
9577 occur in macroized code. */
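/* A worked example (illustrative): for "(a > b) == 0", evaluating the
   expression at the three possible orderings of a and b gives the results
   0 (a > b), 1 (a == b) and 1 (a < b); the 3-bit mask built below is then
   3, selecting LE_EXPR, so the whole expression folds to "a <= b".  */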
9579 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9581 tree cval1 = 0, cval2 = 0;
9582 int save_p = 0;
9584 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9585 /* Don't handle degenerate cases here; they should already
9586 have been handled anyway. */
9587 && cval1 != 0 && cval2 != 0
9588 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9589 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9590 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9591 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9592 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9593 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9594 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9596 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9597 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9599 /* We can't just pass T to eval_subst in case cval1 or cval2
9600 was the same as ARG1. */
9602 tree high_result
9603 = fold_build2_loc (loc, code, type,
9604 eval_subst (loc, arg0, cval1, maxval,
9605 cval2, minval),
9606 arg1);
9607 tree equal_result
9608 = fold_build2_loc (loc, code, type,
9609 eval_subst (loc, arg0, cval1, maxval,
9610 cval2, maxval),
9611 arg1);
9612 tree low_result
9613 = fold_build2_loc (loc, code, type,
9614 eval_subst (loc, arg0, cval1, minval,
9615 cval2, maxval),
9616 arg1);
9618 /* All three of these results should be 0 or 1. Confirm they are.
9619 Then use those values to select the proper code to use. */
9621 if (TREE_CODE (high_result) == INTEGER_CST
9622 && TREE_CODE (equal_result) == INTEGER_CST
9623 && TREE_CODE (low_result) == INTEGER_CST)
9625 /* Make a 3-bit mask with the high-order bit being the
9626 value for `>', the next for `=', and the low for `<'. */
9627 switch ((integer_onep (high_result) * 4)
9628 + (integer_onep (equal_result) * 2)
9629 + integer_onep (low_result))
9631 case 0:
9632 /* Always false. */
9633 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9634 case 1:
9635 code = LT_EXPR;
9636 break;
9637 case 2:
9638 code = EQ_EXPR;
9639 break;
9640 case 3:
9641 code = LE_EXPR;
9642 break;
9643 case 4:
9644 code = GT_EXPR;
9645 break;
9646 case 5:
9647 code = NE_EXPR;
9648 break;
9649 case 6:
9650 code = GE_EXPR;
9651 break;
9652 case 7:
9653 /* Always true. */
9654 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9657 if (save_p)
9659 tem = save_expr (build2 (code, type, cval1, cval2));
9660 SET_EXPR_LOCATION (tem, loc);
9661 return tem;
9663 return fold_build2_loc (loc, code, type, cval1, cval2);
9668 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9669 into a single range test. */
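/* For example (illustrative, for nonnegative X): "X / 4 == 2" holds
   exactly when X is in the range [8, 11], so fold_div_compare can turn
   it into a pair of range checks.  */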
9670 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9671 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9672 && TREE_CODE (arg1) == INTEGER_CST
9673 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9674 && !integer_zerop (TREE_OPERAND (arg0, 1))
9675 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9676 && !TREE_OVERFLOW (arg1))
9678 tem = fold_div_compare (loc, code, type, arg0, arg1);
9679 if (tem != NULL_TREE)
9680 return tem;
9683 /* Fold ~X op ~Y as Y op X. */
9684 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9685 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9687 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9688 return fold_build2_loc (loc, code, type,
9689 fold_convert_loc (loc, cmp_type,
9690 TREE_OPERAND (arg1, 0)),
9691 TREE_OPERAND (arg0, 0));
9694 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9695 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9696 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9698 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9699 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9700 TREE_OPERAND (arg0, 0),
9701 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9702 fold_convert_loc (loc, cmp_type, arg1)));
9705 return NULL_TREE;
9709 /* Subroutine of fold_binary. Optimize complex multiplications of the
9710 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9711 argument EXPR represents the expression "z" of type TYPE. */
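/* The underlying identity (for exposition): with z == a + b*i,
   z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b, whose imaginary
   part is zero; the code below builds exactly that COMPLEX_EXPR.  */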
9713 static tree
9714 fold_mult_zconjz (location_t loc, tree type, tree expr)
9716 tree itype = TREE_TYPE (type);
9717 tree rpart, ipart, tem;
9719 if (TREE_CODE (expr) == COMPLEX_EXPR)
9721 rpart = TREE_OPERAND (expr, 0);
9722 ipart = TREE_OPERAND (expr, 1);
9724 else if (TREE_CODE (expr) == COMPLEX_CST)
9726 rpart = TREE_REALPART (expr);
9727 ipart = TREE_IMAGPART (expr);
9729 else
9731 expr = save_expr (expr);
9732 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9733 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9736 rpart = save_expr (rpart);
9737 ipart = save_expr (ipart);
9738 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9739 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9740 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9741 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9742 build_zero_cst (itype));
9746 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9747 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9748 guarantees that P and N have the same least significant log2(M) bits.
9749 N is not otherwise constrained. In particular, N is not normalized to
9750 0 <= N < M as is common. In general, the precise value of P is unknown.
9751 M is chosen as large as possible such that constant N can be determined.
9753 Returns M and sets *RESIDUE to N.
9755 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9756 account. This is not always possible due to PR 35705.
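/* A worked example (illustrative): for EXPR == &buf + 5 where buf is
   known to be 16-byte aligned, the result is M == 16 with *RESIDUE == 5,
   i.e. the pointer value P satisfies P == 5 (mod 16).  */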
9759 static unsigned HOST_WIDE_INT
9760 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9761 bool allow_func_align)
9763 enum tree_code code;
9765 *residue = 0;
9767 code = TREE_CODE (expr);
9768 if (code == ADDR_EXPR)
9770 unsigned int bitalign;
9771 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9772 *residue /= BITS_PER_UNIT;
9773 return bitalign / BITS_PER_UNIT;
9775 else if (code == POINTER_PLUS_EXPR)
9777 tree op0, op1;
9778 unsigned HOST_WIDE_INT modulus;
9779 enum tree_code inner_code;
9781 op0 = TREE_OPERAND (expr, 0);
9782 STRIP_NOPS (op0);
9783 modulus = get_pointer_modulus_and_residue (op0, residue,
9784 allow_func_align);
9786 op1 = TREE_OPERAND (expr, 1);
9787 STRIP_NOPS (op1);
9788 inner_code = TREE_CODE (op1);
9789 if (inner_code == INTEGER_CST)
9791 *residue += TREE_INT_CST_LOW (op1);
9792 return modulus;
9794 else if (inner_code == MULT_EXPR)
9796 op1 = TREE_OPERAND (op1, 1);
9797 if (TREE_CODE (op1) == INTEGER_CST)
9799 unsigned HOST_WIDE_INT align;
9801 /* Compute the greatest power-of-2 divisor of op1. */
9802 align = TREE_INT_CST_LOW (op1);
9803 align &= -align;
9805 /* If align is non-zero and less than modulus, replace
9806 modulus with align. If align is 0, then either op1 is 0
9807 or the greatest power-of-2 divisor of op1 doesn't fit in an
9808 unsigned HOST_WIDE_INT. In either case, no additional
9809 constraint is imposed. */
9810 if (align)
9811 modulus = MIN (modulus, align);
9813 return modulus;
9818 /* If we get here, we were unable to determine anything useful about the
9819 expression. */
9820 return 1;
9823 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9824 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9826 static bool
9827 vec_cst_ctor_to_array (tree arg, tree *elts)
9829 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9831 if (TREE_CODE (arg) == VECTOR_CST)
9833 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9834 elts[i] = VECTOR_CST_ELT (arg, i);
9836 else if (TREE_CODE (arg) == CONSTRUCTOR)
9838 constructor_elt *elt;
9840 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9841 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9842 return false;
9843 else
9844 elts[i] = elt->value;
9846 else
9847 return false;
9848 for (; i < nelts; i++)
9849 elts[i]
9850 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9851 return true;
9854 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9855 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9856 NULL_TREE otherwise. */
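/* Illustrative example: with four-element vectors and SEL == {0, 4, 1, 5},
   the result is {ARG0[0], ARG1[0], ARG0[1], ARG1[1]}, i.e. an interleave
   of the low halves of the two input vectors.  */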
9858 static tree
9859 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9861 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9862 tree *elts;
9863 bool need_ctor = false;
9865 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9866 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9867 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9868 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9869 return NULL_TREE;
9871 elts = XALLOCAVEC (tree, nelts * 3);
9872 if (!vec_cst_ctor_to_array (arg0, elts)
9873 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9874 return NULL_TREE;
9876 for (i = 0; i < nelts; i++)
9878 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9879 need_ctor = true;
9880 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9883 if (need_ctor)
9885 vec<constructor_elt, va_gc> *v;
9886 vec_alloc (v, nelts);
9887 for (i = 0; i < nelts; i++)
9888 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9889 return build_constructor (type, v);
9891 else
9892 return build_vector (type, &elts[2 * nelts]);
9895 /* Try to fold a pointer difference of type TYPE between two address
9896 expressions of array references AREF0 and AREF1 using location LOC. Return a
9897 simplified expression for the difference or NULL_TREE. */
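/* Illustrative example: the difference "&a[i] - &a[j]" folds to
   "(i - j) * sizeof (a[0])" (converted to TYPE); nested array references
   are handled by recursing on the bases.  */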
9899 static tree
9900 fold_addr_of_array_ref_difference (location_t loc, tree type,
9901 tree aref0, tree aref1)
9903 tree base0 = TREE_OPERAND (aref0, 0);
9904 tree base1 = TREE_OPERAND (aref1, 0);
9905 tree base_offset = build_int_cst (type, 0);
9907 /* If the bases are array references as well, recurse. If the bases
9908 are pointer indirections compute the difference of the pointers.
9909 If the bases are equal, we are set. */
9910 if ((TREE_CODE (base0) == ARRAY_REF
9911 && TREE_CODE (base1) == ARRAY_REF
9912 && (base_offset
9913 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9914 || (INDIRECT_REF_P (base0)
9915 && INDIRECT_REF_P (base1)
9916 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9917 TREE_OPERAND (base0, 0),
9918 TREE_OPERAND (base1, 0))))
9919 || operand_equal_p (base0, base1, 0))
9921 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9922 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9923 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9924 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9925 return fold_build2_loc (loc, PLUS_EXPR, type,
9926 base_offset,
9927 fold_build2_loc (loc, MULT_EXPR, type,
9928 diff, esz));
9930 return NULL_TREE;
9933 /* If the real or vector real constant CST of type TYPE has an exact
9934 inverse, return it, else return NULL. */
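/* For example (illustrative): 2.0 has the exact inverse 0.5, but 3.0
   yields NULL_TREE because 1/3 is not exactly representable in binary
   floating point.  */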
9936 static tree
9937 exact_inverse (tree type, tree cst)
9939 REAL_VALUE_TYPE r;
9940 tree unit_type, *elts;
9941 enum machine_mode mode;
9942 unsigned vec_nelts, i;
9944 switch (TREE_CODE (cst))
9946 case REAL_CST:
9947 r = TREE_REAL_CST (cst);
9949 if (exact_real_inverse (TYPE_MODE (type), &r))
9950 return build_real (type, r);
9952 return NULL_TREE;
9954 case VECTOR_CST:
9955 vec_nelts = VECTOR_CST_NELTS (cst);
9956 elts = XALLOCAVEC (tree, vec_nelts);
9957 unit_type = TREE_TYPE (type);
9958 mode = TYPE_MODE (unit_type);
9960 for (i = 0; i < vec_nelts; i++)
9962 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9963 if (!exact_real_inverse (mode, &r))
9964 return NULL_TREE;
9965 elts[i] = build_real (unit_type, r);
9968 return build_vector (type, elts);
9970 default:
9971 return NULL_TREE;
9975 /* Mask out the tz least significant bits of X of type TYPE where
9976 tz is the number of trailing zeroes in Y. */
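/* A worked example (illustrative): Y == 24 (binary 11000) has three
   trailing zeros, so X == 29 (binary 11101) is masked to 24 (binary
   11000).  */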
9977 static double_int
9978 mask_with_tz (tree type, double_int x, double_int y)
9980 int tz = y.trailing_zeros ();
9982 if (tz > 0)
9984 double_int mask;
9986 mask = ~double_int::mask (tz);
9987 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9988 return mask & x;
9990 return x;
9993 /* Return true when T is an address and is known to be nonzero.
9994 For floating point we further ensure that T is not denormal.
9995 Similar logic is present in nonzero_address in rtlanal.h.
9997 If the return value is based on the assumption that signed overflow
9998 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9999 change *STRICT_OVERFLOW_P. */
10001 static bool
10002 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10004 tree type = TREE_TYPE (t);
10005 enum tree_code code;
10007 /* Doing something useful for floating point would need more work. */
10008 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10009 return false;
10011 code = TREE_CODE (t);
10012 switch (TREE_CODE_CLASS (code))
10014 case tcc_unary:
10015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10016 strict_overflow_p);
10017 case tcc_binary:
10018 case tcc_comparison:
10019 return tree_binary_nonzero_warnv_p (code, type,
10020 TREE_OPERAND (t, 0),
10021 TREE_OPERAND (t, 1),
10022 strict_overflow_p);
10023 case tcc_constant:
10024 case tcc_declaration:
10025 case tcc_reference:
10026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10028 default:
10029 break;
10032 switch (code)
10034 case TRUTH_NOT_EXPR:
10035 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10036 strict_overflow_p);
10038 case TRUTH_AND_EXPR:
10039 case TRUTH_OR_EXPR:
10040 case TRUTH_XOR_EXPR:
10041 return tree_binary_nonzero_warnv_p (code, type,
10042 TREE_OPERAND (t, 0),
10043 TREE_OPERAND (t, 1),
10044 strict_overflow_p);
10046 case COND_EXPR:
10047 case CONSTRUCTOR:
10048 case OBJ_TYPE_REF:
10049 case ASSERT_EXPR:
10050 case ADDR_EXPR:
10051 case WITH_SIZE_EXPR:
10052 case SSA_NAME:
10053 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10055 case COMPOUND_EXPR:
10056 case MODIFY_EXPR:
10057 case BIND_EXPR:
10058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10059 strict_overflow_p);
10061 case SAVE_EXPR:
10062 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10063 strict_overflow_p);
10065 case CALL_EXPR:
10067 tree fndecl = get_callee_fndecl (t);
10068 if (!fndecl) return false;
10069 if (flag_delete_null_pointer_checks && !flag_check_new
10070 && DECL_IS_OPERATOR_NEW (fndecl)
10071 && !TREE_NOTHROW (fndecl))
10072 return true;
10073 if (flag_delete_null_pointer_checks
10074 && lookup_attribute ("returns_nonnull",
10075 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10076 return true;
10077 return alloca_call_p (t);
10080 default:
10081 break;
10083 return false;
10086 /* Return true when T is an address and is known to be nonzero.
10087 Handle warnings about undefined signed overflow. */
10089 static bool
10090 tree_expr_nonzero_p (tree t)
10092 bool ret, strict_overflow_p;
10094 strict_overflow_p = false;
10095 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10096 if (strict_overflow_p)
10097 fold_overflow_warning (("assuming signed overflow does not occur when "
10098 "determining that expression is always "
10099 "non-zero"),
10100 WARN_STRICT_OVERFLOW_MISC);
10101 return ret;
10104 /* Fold a binary expression of code CODE and type TYPE with operands
10105 OP0 and OP1. LOC is the location of the resulting expression.
10106 Return the folded expression if folding is successful. Otherwise,
10107 return NULL_TREE. */
10109 tree
10110 fold_binary_loc (location_t loc,
10111 enum tree_code code, tree type, tree op0, tree op1)
10113 enum tree_code_class kind = TREE_CODE_CLASS (code);
10114 tree arg0, arg1, tem;
10115 tree t1 = NULL_TREE;
10116 bool strict_overflow_p;
10117 unsigned int prec;
10119 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10120 && TREE_CODE_LENGTH (code) == 2
10121 && op0 != NULL_TREE
10122 && op1 != NULL_TREE);
10124 arg0 = op0;
10125 arg1 = op1;
10127 /* Strip any conversions that don't change the mode. This is
10128 safe for every expression, except for a comparison expression
10129 because its signedness is derived from its operands. So, in
10130 the latter case, only strip conversions that don't change the
10131 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10132 preserved.
10134 Note that this is done as an internal manipulation within the
10135 constant folder, in order to find the simplest representation
10136 of the arguments so that their form can be studied. In any
10137 cases, the appropriate type conversions should be put back in
10138 the tree that will get out of the constant folder. */
10140 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10142 STRIP_SIGN_NOPS (arg0);
10143 STRIP_SIGN_NOPS (arg1);
10145 else
10147 STRIP_NOPS (arg0);
10148 STRIP_NOPS (arg1);
10151 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10152 constant but we can't do arithmetic on them. */
10153 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10154 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10155 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10156 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10157 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10158 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10159 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10161 if (kind == tcc_binary)
10163 /* Make sure type and arg0 have the same saturating flag. */
10164 gcc_assert (TYPE_SATURATING (type)
10165 == TYPE_SATURATING (TREE_TYPE (arg0)));
10166 tem = const_binop (code, arg0, arg1);
10168 else if (kind == tcc_comparison)
10169 tem = fold_relational_const (code, type, arg0, arg1);
10170 else
10171 tem = NULL_TREE;
10173 if (tem != NULL_TREE)
10175 if (TREE_TYPE (tem) != type)
10176 tem = fold_convert_loc (loc, type, tem);
10177 return tem;
10181 /* If this is a commutative operation, and ARG0 is a constant, move it
10182 to ARG1 to reduce the number of tests below. */
10183 if (commutative_tree_code (code)
10184 && tree_swap_operands_p (arg0, arg1, true))
10185 return fold_build2_loc (loc, code, type, op1, op0);
10187 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10189 First check for cases where an arithmetic operation is applied to a
10190 compound, conditional, or comparison operation. Push the arithmetic
10191 operation inside the compound or conditional to see if any folding
10192 can then be done. Convert comparison to conditional for this purpose.
10193 This also optimizes non-constant cases that used to be done in
10194 expand_expr.
10196 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10197 one of the operands is a comparison and the other is a comparison, a
10198 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10199 code below would make the expression more complex. Change it to a
10200 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10201 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10203 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10204 || code == EQ_EXPR || code == NE_EXPR)
10205 && TREE_CODE (type) != VECTOR_TYPE
10206 && ((truth_value_p (TREE_CODE (arg0))
10207 && (truth_value_p (TREE_CODE (arg1))
10208 || (TREE_CODE (arg1) == BIT_AND_EXPR
10209 && integer_onep (TREE_OPERAND (arg1, 1)))))
10210 || (truth_value_p (TREE_CODE (arg1))
10211 && (truth_value_p (TREE_CODE (arg0))
10212 || (TREE_CODE (arg0) == BIT_AND_EXPR
10213 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10215 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10216 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10217 : TRUTH_XOR_EXPR,
10218 boolean_type_node,
10219 fold_convert_loc (loc, boolean_type_node, arg0),
10220 fold_convert_loc (loc, boolean_type_node, arg1));
10222 if (code == EQ_EXPR)
10223 tem = invert_truthvalue_loc (loc, tem);
10225 return fold_convert_loc (loc, type, tem);
10228 if (TREE_CODE_CLASS (code) == tcc_binary
10229 || TREE_CODE_CLASS (code) == tcc_comparison)
10231 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10233 tem = fold_build2_loc (loc, code, type,
10234 fold_convert_loc (loc, TREE_TYPE (op0),
10235 TREE_OPERAND (arg0, 1)), op1);
10236 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10237 tem);
10239 if (TREE_CODE (arg1) == COMPOUND_EXPR
10240 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10242 tem = fold_build2_loc (loc, code, type, op0,
10243 fold_convert_loc (loc, TREE_TYPE (op1),
10244 TREE_OPERAND (arg1, 1)));
10245 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10246 tem);
10249 if (TREE_CODE (arg0) == COND_EXPR
10250 || TREE_CODE (arg0) == VEC_COND_EXPR
10251 || COMPARISON_CLASS_P (arg0))
10253 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10254 arg0, arg1,
10255 /*cond_first_p=*/1);
10256 if (tem != NULL_TREE)
10257 return tem;
10260 if (TREE_CODE (arg1) == COND_EXPR
10261 || TREE_CODE (arg1) == VEC_COND_EXPR
10262 || COMPARISON_CLASS_P (arg1))
10264 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10265 arg1, arg0,
10266 /*cond_first_p=*/0);
10267 if (tem != NULL_TREE)
10268 return tem;
10272 switch (code)
10274 case MEM_REF:
10275 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10276 if (TREE_CODE (arg0) == ADDR_EXPR
10277 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10279 tree iref = TREE_OPERAND (arg0, 0);
10280 return fold_build2 (MEM_REF, type,
10281 TREE_OPERAND (iref, 0),
10282 int_const_binop (PLUS_EXPR, arg1,
10283 TREE_OPERAND (iref, 1)));
10286 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10287 if (TREE_CODE (arg0) == ADDR_EXPR
10288 && handled_component_p (TREE_OPERAND (arg0, 0)))
10290 tree base;
10291 HOST_WIDE_INT coffset;
10292 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10293 &coffset);
10294 if (!base)
10295 return NULL_TREE;
10296 return fold_build2 (MEM_REF, type,
10297 build_fold_addr_expr (base),
10298 int_const_binop (PLUS_EXPR, arg1,
10299 size_int (coffset)));
10302 return NULL_TREE;
10304 case POINTER_PLUS_EXPR:
10305 /* 0 +p index -> (type)index */
10306 if (integer_zerop (arg0))
10307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10309 /* PTR +p 0 -> PTR */
10310 if (integer_zerop (arg1))
10311 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10313 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10314 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10315 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10316 return fold_convert_loc (loc, type,
10317 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10318 fold_convert_loc (loc, sizetype,
10319 arg1),
10320 fold_convert_loc (loc, sizetype,
10321 arg0)));
10323 /* (PTR +p B) +p A -> PTR +p (B + A) */
10324 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10326 tree inner;
10327 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10328 tree arg00 = TREE_OPERAND (arg0, 0);
10329 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10330 arg01, fold_convert_loc (loc, sizetype, arg1));
10331 return fold_convert_loc (loc, type,
10332 fold_build_pointer_plus_loc (loc,
10333 arg00, inner));
10336 /* PTR_CST +p CST -> CST1 */
10337 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10338 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10339 fold_convert_loc (loc, type, arg1));
10341 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10342 of the array. The loop optimizer sometimes produces this type of
10343 expression. */
10344 if (TREE_CODE (arg0) == ADDR_EXPR)
10346 tem = try_move_mult_to_index (loc, arg0,
10347 fold_convert_loc (loc,
10348 ssizetype, arg1));
10349 if (tem)
10350 return fold_convert_loc (loc, type, tem);
10353 return NULL_TREE;
10355 case PLUS_EXPR:
10356 /* A + (-B) -> A - B */
10357 if (TREE_CODE (arg1) == NEGATE_EXPR
10358 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10359 return fold_build2_loc (loc, MINUS_EXPR, type,
10360 fold_convert_loc (loc, type, arg0),
10361 fold_convert_loc (loc, type,
10362 TREE_OPERAND (arg1, 0)));
10363 /* (-A) + B -> B - A */
10364 if (TREE_CODE (arg0) == NEGATE_EXPR
10365 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10366 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10367 return fold_build2_loc (loc, MINUS_EXPR, type,
10368 fold_convert_loc (loc, type, arg1),
10369 fold_convert_loc (loc, type,
10370 TREE_OPERAND (arg0, 0)));
10372 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10374 /* Convert ~A + 1 to -A. */
10375 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10376 && integer_onep (arg1))
10377 return fold_build1_loc (loc, NEGATE_EXPR, type,
10378 fold_convert_loc (loc, type,
10379 TREE_OPERAND (arg0, 0)));
10381 /* ~X + X is -1. */
10382 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10383 && !TYPE_OVERFLOW_TRAPS (type))
10385 tree tem = TREE_OPERAND (arg0, 0);
10387 STRIP_NOPS (tem);
10388 if (operand_equal_p (tem, arg1, 0))
10390 t1 = build_all_ones_cst (type);
10391 return omit_one_operand_loc (loc, type, t1, arg1);
10395 /* X + ~X is -1. */
10396 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10397 && !TYPE_OVERFLOW_TRAPS (type))
10399 tree tem = TREE_OPERAND (arg1, 0);
10401 STRIP_NOPS (tem);
10402 if (operand_equal_p (arg0, tem, 0))
10404 t1 = build_all_ones_cst (type);
10405 return omit_one_operand_loc (loc, type, t1, arg0);
10409 /* X + (X / CST) * -CST is X % CST. */
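/* A numeric example (illustrative, using truncating division): for
   X == 29 and CST == 8, X + (X / 8) * -8 == 29 - 24 == 5, which is
   exactly 29 % 8.  */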
10410 if (TREE_CODE (arg1) == MULT_EXPR
10411 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10412 && operand_equal_p (arg0,
10413 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10415 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10416 tree cst1 = TREE_OPERAND (arg1, 1);
10417 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10418 cst1, cst0);
10419 if (sum && integer_zerop (sum))
10420 return fold_convert_loc (loc, type,
10421 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10422 TREE_TYPE (arg0), arg0,
10423 cst0));
10427 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10428 one. Make sure the type is not saturating and has the signedness of
10429 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10430 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10431 if ((TREE_CODE (arg0) == MULT_EXPR
10432 || TREE_CODE (arg1) == MULT_EXPR)
10433 && !TYPE_SATURATING (type)
10434 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10435 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10436 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10438 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10439 if (tem)
10440 return tem;
10443 if (! FLOAT_TYPE_P (type))
10445 if (integer_zerop (arg1))
10446 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10448 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10449 with a constant, and the two constants have no bits in common,
10450 we should treat this as a BIT_IOR_EXPR since this may produce more
10451 simplifications. */
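/* For example (illustrative): in "(X & 0xF0) + (Y & 0x0F)" the two masks
   share no bits, so no carries can occur and the sum equals
   "(X & 0xF0) | (Y & 0x0F)".  */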
10452 if (TREE_CODE (arg0) == BIT_AND_EXPR
10453 && TREE_CODE (arg1) == BIT_AND_EXPR
10454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10455 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10456 && integer_zerop (const_binop (BIT_AND_EXPR,
10457 TREE_OPERAND (arg0, 1),
10458 TREE_OPERAND (arg1, 1))))
10460 code = BIT_IOR_EXPR;
10461 goto bit_ior;
10464 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10465 (plus (plus (mult) (mult)) (foo)) so that we can
10466 take advantage of the factoring cases below. */
10467 if (TYPE_OVERFLOW_WRAPS (type)
10468 && (((TREE_CODE (arg0) == PLUS_EXPR
10469 || TREE_CODE (arg0) == MINUS_EXPR)
10470 && TREE_CODE (arg1) == MULT_EXPR)
10471 || ((TREE_CODE (arg1) == PLUS_EXPR
10472 || TREE_CODE (arg1) == MINUS_EXPR)
10473 && TREE_CODE (arg0) == MULT_EXPR)))
10475 tree parg0, parg1, parg, marg;
10476 enum tree_code pcode;
10478 if (TREE_CODE (arg1) == MULT_EXPR)
10479 parg = arg0, marg = arg1;
10480 else
10481 parg = arg1, marg = arg0;
10482 pcode = TREE_CODE (parg);
10483 parg0 = TREE_OPERAND (parg, 0);
10484 parg1 = TREE_OPERAND (parg, 1);
10485 STRIP_NOPS (parg0);
10486 STRIP_NOPS (parg1);
10488 if (TREE_CODE (parg0) == MULT_EXPR
10489 && TREE_CODE (parg1) != MULT_EXPR)
10490 return fold_build2_loc (loc, pcode, type,
10491 fold_build2_loc (loc, PLUS_EXPR, type,
10492 fold_convert_loc (loc, type,
10493 parg0),
10494 fold_convert_loc (loc, type,
10495 marg)),
10496 fold_convert_loc (loc, type, parg1));
10497 if (TREE_CODE (parg0) != MULT_EXPR
10498 && TREE_CODE (parg1) == MULT_EXPR)
10499 return
10500 fold_build2_loc (loc, PLUS_EXPR, type,
10501 fold_convert_loc (loc, type, parg0),
10502 fold_build2_loc (loc, pcode, type,
10503 fold_convert_loc (loc, type, marg),
10504 fold_convert_loc (loc, type,
10505 parg1)));
10508 else
10510 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10511 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10514 /* Likewise if the operands are reversed. */
10515 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10516 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10518 /* Convert X + -C into X - C. */
10519 if (TREE_CODE (arg1) == REAL_CST
10520 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10522 tem = fold_negate_const (arg1, type);
10523 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10524 return fold_build2_loc (loc, MINUS_EXPR, type,
10525 fold_convert_loc (loc, type, arg0),
10526 fold_convert_loc (loc, type, tem));
10529 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10530 to __complex__ ( x, y ). This is not the same for SNaNs or
10531 if signed zeros are involved. */
10532 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10533 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10534 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10536 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10537 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10538 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10539 bool arg0rz = false, arg0iz = false;
10540 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10541 || (arg0i && (arg0iz = real_zerop (arg0i))))
10543 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10544 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10545 if (arg0rz && arg1i && real_zerop (arg1i))
10547 tree rp = arg1r ? arg1r
10548 : build1 (REALPART_EXPR, rtype, arg1);
10549 tree ip = arg0i ? arg0i
10550 : build1 (IMAGPART_EXPR, rtype, arg0);
10551 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10553 else if (arg0iz && arg1r && real_zerop (arg1r))
10555 tree rp = arg0r ? arg0r
10556 : build1 (REALPART_EXPR, rtype, arg0);
10557 tree ip = arg1i ? arg1i
10558 : build1 (IMAGPART_EXPR, rtype, arg1);
10559 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10564 if (flag_unsafe_math_optimizations
10565 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10566 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10567 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10568 return tem;
10570 /* Convert x+x into x*2.0. */
10571 if (operand_equal_p (arg0, arg1, 0)
10572 && SCALAR_FLOAT_TYPE_P (type))
10573 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10574 build_real (type, dconst2));
10576 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10577 We associate floats only if the user has specified
10578 -fassociative-math. */
10579 if (flag_associative_math
10580 && TREE_CODE (arg1) == PLUS_EXPR
10581 && TREE_CODE (arg0) != MULT_EXPR)
10583 tree tree10 = TREE_OPERAND (arg1, 0);
10584 tree tree11 = TREE_OPERAND (arg1, 1);
10585 if (TREE_CODE (tree11) == MULT_EXPR
10586 && TREE_CODE (tree10) == MULT_EXPR)
10588 tree tree0;
10589 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10590 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10593 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10594 We associate floats only if the user has specified
10595 -fassociative-math. */
10596 if (flag_associative_math
10597 && TREE_CODE (arg0) == PLUS_EXPR
10598 && TREE_CODE (arg1) != MULT_EXPR)
10600 tree tree00 = TREE_OPERAND (arg0, 0);
10601 tree tree01 = TREE_OPERAND (arg0, 1);
10602 if (TREE_CODE (tree01) == MULT_EXPR
10603 && TREE_CODE (tree00) == MULT_EXPR)
10605 tree tree0;
10606 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10607 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10612 bit_rotate:
10613 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10614 is a rotate of A by C1 bits. */
10615 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10616 is a rotate of A by B bits. */
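/* A concrete instance (illustrative): for a 32-bit unsigned A,
   "(A << 8) + (A >> 24)" is A rotated left by 8 bits, and
   "(A << B) + (A >> (32 - B))" is A rotated left by B bits.  */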
10618 enum tree_code code0, code1;
10619 tree rtype;
10620 code0 = TREE_CODE (arg0);
10621 code1 = TREE_CODE (arg1);
10622 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10623 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10624 && operand_equal_p (TREE_OPERAND (arg0, 0),
10625 TREE_OPERAND (arg1, 0), 0)
10626 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10627 TYPE_UNSIGNED (rtype))
10628 /* Only create rotates in complete modes. Other cases are not
10629 expanded properly. */
10630 && (element_precision (rtype)
10631 == element_precision (TYPE_MODE (rtype))))
10633 tree tree01, tree11;
10634 enum tree_code code01, code11;
10636 tree01 = TREE_OPERAND (arg0, 1);
10637 tree11 = TREE_OPERAND (arg1, 1);
10638 STRIP_NOPS (tree01);
10639 STRIP_NOPS (tree11);
10640 code01 = TREE_CODE (tree01);
10641 code11 = TREE_CODE (tree11);
10642 if (code01 == INTEGER_CST
10643 && code11 == INTEGER_CST
10644 && TREE_INT_CST_HIGH (tree01) == 0
10645 && TREE_INT_CST_HIGH (tree11) == 0
10646 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10647 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10649 tem = build2_loc (loc, LROTATE_EXPR,
10650 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10651 TREE_OPERAND (arg0, 0),
10652 code0 == LSHIFT_EXPR ? tree01 : tree11);
10653 return fold_convert_loc (loc, type, tem);
10655 else if (code11 == MINUS_EXPR)
10657 tree tree110, tree111;
10658 tree110 = TREE_OPERAND (tree11, 0);
10659 tree111 = TREE_OPERAND (tree11, 1);
10660 STRIP_NOPS (tree110);
10661 STRIP_NOPS (tree111);
10662 if (TREE_CODE (tree110) == INTEGER_CST
10663 && 0 == compare_tree_int (tree110,
10664 element_precision
10665 (TREE_TYPE (TREE_OPERAND
10666 (arg0, 0))))
10667 && operand_equal_p (tree01, tree111, 0))
10668 return
10669 fold_convert_loc (loc, type,
10670 build2 ((code0 == LSHIFT_EXPR
10671 ? LROTATE_EXPR
10672 : RROTATE_EXPR),
10673 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10674 TREE_OPERAND (arg0, 0), tree01));
10676 else if (code01 == MINUS_EXPR)
10678 tree tree010, tree011;
10679 tree010 = TREE_OPERAND (tree01, 0);
10680 tree011 = TREE_OPERAND (tree01, 1);
10681 STRIP_NOPS (tree010);
10682 STRIP_NOPS (tree011);
10683 if (TREE_CODE (tree010) == INTEGER_CST
10684 && 0 == compare_tree_int (tree010,
10685 element_precision
10686 (TREE_TYPE (TREE_OPERAND
10687 (arg0, 0))))
10688 && operand_equal_p (tree11, tree011, 0))
10689 return fold_convert_loc
10690 (loc, type,
10691 build2 ((code0 != LSHIFT_EXPR
10692 ? LROTATE_EXPR
10693 : RROTATE_EXPR),
10694 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10695 TREE_OPERAND (arg0, 0), tree11));
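/* For example, with a 32-bit unsigned A, (A << 5) + (A >> 27) is A
   rotated left by 5: the two shifted halves cannot overlap, so the
   addition acts as a bitwise OR.  Likewise (A << B) + (A >> (32 - B))
   rotates A left by B bits.  */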
10700 associate:
10701 /* In most languages, we can't reassociate operations on floats
10702 across parentheses. Rather than remember where the parentheses were,
10703 we don't associate floats at all, unless the user has specified
10704 -fassociative-math.
10705 We also need to make sure the type is not saturating. */
10707 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10708 && !TYPE_SATURATING (type))
10710 tree var0, con0, lit0, minus_lit0;
10711 tree var1, con1, lit1, minus_lit1;
10712 tree atype = type;
10713 bool ok = true;
10715 /* Split both trees into variables, constants, and literals. Then
10716 associate each group together, the constants with literals,
10717 then the result with variables. This increases the chances of
10718 literals being recombined later and of generating relocatable
10719 expressions for the sum of a constant and literal. */
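/* For example, (x + 1) + (y + 2) splits into variables {x, y} and
   literals {1, 2}; regrouping gives (x + y) + 3, so the two literals
   fold into a single constant.  */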
10720 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10721 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10722 code == MINUS_EXPR);
10724 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10725 if (code == MINUS_EXPR)
10726 code = PLUS_EXPR;
10728 /* With undefined overflow prefer doing association in a type
10729 which wraps on overflow, if that is one of the operand types. */
10730 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10731 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10733 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10734 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10735 atype = TREE_TYPE (arg0);
10736 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10737 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10738 atype = TREE_TYPE (arg1);
10739 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10742 /* With undefined overflow we can only associate constants with one
10743 variable, and constants whose association doesn't overflow. */
10744 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10745 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10747 if (var0 && var1)
10749 tree tmp0 = var0;
10750 tree tmp1 = var1;
10752 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10753 tmp0 = TREE_OPERAND (tmp0, 0);
10754 if (CONVERT_EXPR_P (tmp0)
10755 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10756 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10757 <= TYPE_PRECISION (atype)))
10758 tmp0 = TREE_OPERAND (tmp0, 0);
10759 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10760 tmp1 = TREE_OPERAND (tmp1, 0);
10761 if (CONVERT_EXPR_P (tmp1)
10762 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10763 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10764 <= TYPE_PRECISION (atype)))
10765 tmp1 = TREE_OPERAND (tmp1, 0);
10766 /* The only case we can still associate with two variables
10767 is if they are the same, modulo negation and bit-pattern
10768 preserving conversions. */
10769 if (!operand_equal_p (tmp0, tmp1, 0))
10770 ok = false;
10774 /* Only do something if we found more than two objects. Otherwise,
10775 nothing has changed and we risk infinite recursion. */
10776 if (ok
10777 && (2 < ((var0 != 0) + (var1 != 0)
10778 + (con0 != 0) + (con1 != 0)
10779 + (lit0 != 0) + (lit1 != 0)
10780 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10782 bool any_overflows = false;
10783 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10784 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10785 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10786 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10787 var0 = associate_trees (loc, var0, var1, code, atype);
10788 con0 = associate_trees (loc, con0, con1, code, atype);
10789 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10790 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10791 code, atype);
10793 /* Preserve the MINUS_EXPR if the negative part of the literal is
10794 greater than the positive part. Otherwise, the multiplicative
10795 folding code (i.e. extract_muldiv) may be fooled when
10796 unsigned constants are subtracted, as in the following
10797 example: ((X*2 + 4) - 8U)/2. */
10798 if (minus_lit0 && lit0)
10800 if (TREE_CODE (lit0) == INTEGER_CST
10801 && TREE_CODE (minus_lit0) == INTEGER_CST
10802 && tree_int_cst_lt (lit0, minus_lit0))
10804 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10805 MINUS_EXPR, atype);
10806 lit0 = 0;
10808 else
10810 lit0 = associate_trees (loc, lit0, minus_lit0,
10811 MINUS_EXPR, atype);
10812 minus_lit0 = 0;
10816 /* Don't introduce overflows through reassociation. */
10817 if (!any_overflows
10818 && ((lit0 && TREE_OVERFLOW_P (lit0))
10819 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10820 return NULL_TREE;
10822 if (minus_lit0)
10824 if (con0 == 0)
10825 return
10826 fold_convert_loc (loc, type,
10827 associate_trees (loc, var0, minus_lit0,
10828 MINUS_EXPR, atype));
10829 else
10831 con0 = associate_trees (loc, con0, minus_lit0,
10832 MINUS_EXPR, atype);
10833 return
10834 fold_convert_loc (loc, type,
10835 associate_trees (loc, var0, con0,
10836 PLUS_EXPR, atype));
10840 con0 = associate_trees (loc, con0, lit0, code, atype);
10841 return
10842 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10843 code, atype));
10847 return NULL_TREE;
10849 case MINUS_EXPR:
10850 /* Pointer simplifications for subtraction, simple reassociations. */
10851 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10853 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10854 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10855 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10857 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10858 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10859 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10860 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10861 return fold_build2_loc (loc, PLUS_EXPR, type,
10862 fold_build2_loc (loc, MINUS_EXPR, type,
10863 arg00, arg10),
10864 fold_build2_loc (loc, MINUS_EXPR, type,
10865 arg01, arg11));
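/* For example, (p p+ 4) - (p p+ 12) becomes (p - p) + (4 - 12),
   which subsequent folding reduces to -8.  */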
10867 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10868 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10870 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10871 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10872 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10873 fold_convert_loc (loc, type, arg1));
10874 if (tmp)
10875 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10878 /* A - (-B) -> A + B */
10879 if (TREE_CODE (arg1) == NEGATE_EXPR)
10880 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10881 fold_convert_loc (loc, type,
10882 TREE_OPERAND (arg1, 0)));
10883 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10884 if (TREE_CODE (arg0) == NEGATE_EXPR
10885 && negate_expr_p (arg1)
10886 && reorder_operands_p (arg0, arg1))
10887 return fold_build2_loc (loc, MINUS_EXPR, type,
10888 fold_convert_loc (loc, type,
10889 negate_expr (arg1)),
10890 fold_convert_loc (loc, type,
10891 TREE_OPERAND (arg0, 0)));
10892 /* Convert -A - 1 to ~A. */
10893 if (TREE_CODE (type) != COMPLEX_TYPE
10894 && TREE_CODE (arg0) == NEGATE_EXPR
10895 && integer_onep (arg1)
10896 && !TYPE_OVERFLOW_TRAPS (type))
10897 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10898 fold_convert_loc (loc, type,
10899 TREE_OPERAND (arg0, 0)));
10901 /* Convert -1 - A to ~A. */
10902 if (TREE_CODE (type) != COMPLEX_TYPE
10903 && integer_all_onesp (arg0))
10904 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10907 /* X - (X / Y) * Y is X % Y. */
10908 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10909 && TREE_CODE (arg1) == MULT_EXPR
10910 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10911 && operand_equal_p (arg0,
10912 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10913 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10914 TREE_OPERAND (arg1, 1), 0))
10915 return
10916 fold_convert_loc (loc, type,
10917 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10918 arg0, TREE_OPERAND (arg1, 1)));
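/* For example, with X = 17 and Y = 5: 17 - (17 / 5) * 5
   = 17 - 15 = 2, which is exactly 17 % 5.  */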
10920 if (! FLOAT_TYPE_P (type))
10922 if (integer_zerop (arg0))
10923 return negate_expr (fold_convert_loc (loc, type, arg1));
10924 if (integer_zerop (arg1))
10925 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10927 /* Fold A - (A & B) into ~B & A. */
10928 if (!TREE_SIDE_EFFECTS (arg0)
10929 && TREE_CODE (arg1) == BIT_AND_EXPR)
10931 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10933 tree arg10 = fold_convert_loc (loc, type,
10934 TREE_OPERAND (arg1, 0));
10935 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10936 fold_build1_loc (loc, BIT_NOT_EXPR,
10937 type, arg10),
10938 fold_convert_loc (loc, type, arg0));
10940 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10942 tree arg11 = fold_convert_loc (loc,
10943 type, TREE_OPERAND (arg1, 1));
10944 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10945 fold_build1_loc (loc, BIT_NOT_EXPR,
10946 type, arg11),
10947 fold_convert_loc (loc, type, arg0));
10951 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10952 any power of 2 minus 1. */
10953 if (TREE_CODE (arg0) == BIT_AND_EXPR
10954 && TREE_CODE (arg1) == BIT_AND_EXPR
10955 && operand_equal_p (TREE_OPERAND (arg0, 0),
10956 TREE_OPERAND (arg1, 0), 0))
10958 tree mask0 = TREE_OPERAND (arg0, 1);
10959 tree mask1 = TREE_OPERAND (arg1, 1);
10960 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10962 if (operand_equal_p (tem, mask1, 0))
10964 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10965 TREE_OPERAND (arg0, 0), mask1);
10966 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
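/* For example, with A = 0b1011 and the mask B = 0b0011:
   (A & ~B) - (A & B) = 8 - 3 = 5, and likewise
   (A ^ B) - B = 0b1000 - 3 = 5.  */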
10971 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10972 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10973 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10975 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10976 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10977 (-ARG1 + ARG0) reduces to -ARG1. */
10978 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10979 return negate_expr (fold_convert_loc (loc, type, arg1));
10981 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10982 __complex__ ( x, -y ). This is not the same for SNaNs or if
10983 signed zeros are involved. */
10984 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10985 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10986 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10988 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10989 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10990 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10991 bool arg0rz = false, arg0iz = false;
10992 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10993 || (arg0i && (arg0iz = real_zerop (arg0i))))
10995 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10996 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10997 if (arg0rz && arg1i && real_zerop (arg1i))
10999 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11000 arg1r ? arg1r
11001 : build1 (REALPART_EXPR, rtype, arg1));
11002 tree ip = arg0i ? arg0i
11003 : build1 (IMAGPART_EXPR, rtype, arg0);
11004 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11006 else if (arg0iz && arg1r && real_zerop (arg1r))
11008 tree rp = arg0r ? arg0r
11009 : build1 (REALPART_EXPR, rtype, arg0);
11010 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11011 arg1i ? arg1i
11012 : build1 (IMAGPART_EXPR, rtype, arg1));
11013 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11018 /* Fold &x - &x. This can happen from &x.foo - &x.
11019 This is unsafe for certain floats even in non-IEEE formats.
11020 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11021 Also note that operand_equal_p is always false if an operand
11022 is volatile. */
11024 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11025 && operand_equal_p (arg0, arg1, 0))
11026 return build_zero_cst (type);
11028 /* A - B -> A + (-B) if B is easily negatable. */
11029 if (negate_expr_p (arg1)
11030 && ((FLOAT_TYPE_P (type)
11031 /* Avoid this transformation if B is a positive REAL_CST. */
11032 && (TREE_CODE (arg1) != REAL_CST
11033 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11034 || INTEGRAL_TYPE_P (type)))
11035 return fold_build2_loc (loc, PLUS_EXPR, type,
11036 fold_convert_loc (loc, type, arg0),
11037 fold_convert_loc (loc, type,
11038 negate_expr (arg1)));
11040 /* Try folding difference of addresses. */
11042 HOST_WIDE_INT diff;
11044 if ((TREE_CODE (arg0) == ADDR_EXPR
11045 || TREE_CODE (arg1) == ADDR_EXPR)
11046 && ptr_difference_const (arg0, arg1, &diff))
11047 return build_int_cst_type (type, diff);
11050 /* Fold &a[i] - &a[j] to i-j. */
11051 if (TREE_CODE (arg0) == ADDR_EXPR
11052 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11053 && TREE_CODE (arg1) == ADDR_EXPR
11054 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11056 tree tem = fold_addr_of_array_ref_difference (loc, type,
11057 TREE_OPERAND (arg0, 0),
11058 TREE_OPERAND (arg1, 0));
11059 if (tem)
11060 return tem;
11063 if (FLOAT_TYPE_P (type)
11064 && flag_unsafe_math_optimizations
11065 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11066 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11067 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11068 return tem;
11070 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11071 one. Make sure the type is not saturating and has the signedness of
11072 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11073 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11074 if ((TREE_CODE (arg0) == MULT_EXPR
11075 || TREE_CODE (arg1) == MULT_EXPR)
11076 && !TYPE_SATURATING (type)
11077 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11078 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11079 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11081 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11082 if (tem)
11083 return tem;
11086 goto associate;
11088 case MULT_EXPR:
11089 /* (-A) * (-B) -> A * B */
11090 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11091 return fold_build2_loc (loc, MULT_EXPR, type,
11092 fold_convert_loc (loc, type,
11093 TREE_OPERAND (arg0, 0)),
11094 fold_convert_loc (loc, type,
11095 negate_expr (arg1)));
11096 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11097 return fold_build2_loc (loc, MULT_EXPR, type,
11098 fold_convert_loc (loc, type,
11099 negate_expr (arg0)),
11100 fold_convert_loc (loc, type,
11101 TREE_OPERAND (arg1, 0)));
11103 if (! FLOAT_TYPE_P (type))
11105 if (integer_zerop (arg1))
11106 return omit_one_operand_loc (loc, type, arg1, arg0);
11107 if (integer_onep (arg1))
11108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11109 /* Transform x * -1 into -x. Make sure to do the negation
11110 on the original operand with conversions not stripped
11111 because we can only strip non-sign-changing conversions. */
11112 if (integer_minus_onep (arg1))
11113 return fold_convert_loc (loc, type, negate_expr (op0));
11114 /* Transform x * -C into -x * C if x is easily negatable. */
11115 if (TREE_CODE (arg1) == INTEGER_CST
11116 && tree_int_cst_sgn (arg1) == -1
11117 && negate_expr_p (arg0)
11118 && (tem = negate_expr (arg1)) != arg1
11119 && !TREE_OVERFLOW (tem))
11120 return fold_build2_loc (loc, MULT_EXPR, type,
11121 fold_convert_loc (loc, type,
11122 negate_expr (arg0)),
11123 tem);
11125 /* (a * (1 << b)) is (a << b) */
11126 if (TREE_CODE (arg1) == LSHIFT_EXPR
11127 && integer_onep (TREE_OPERAND (arg1, 0)))
11128 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11129 TREE_OPERAND (arg1, 1));
11130 if (TREE_CODE (arg0) == LSHIFT_EXPR
11131 && integer_onep (TREE_OPERAND (arg0, 0)))
11132 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11133 TREE_OPERAND (arg0, 1));
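/* For example, 3 * (1 << 4) = 48 = 3 << 4, so the multiplication
   becomes a plain shift.  */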
11135 /* (A + A) * C -> A * 2 * C */
11136 if (TREE_CODE (arg0) == PLUS_EXPR
11137 && TREE_CODE (arg1) == INTEGER_CST
11138 && operand_equal_p (TREE_OPERAND (arg0, 0),
11139 TREE_OPERAND (arg0, 1), 0))
11140 return fold_build2_loc (loc, MULT_EXPR, type,
11141 omit_one_operand_loc (loc, type,
11142 TREE_OPERAND (arg0, 0),
11143 TREE_OPERAND (arg0, 1)),
11144 fold_build2_loc (loc, MULT_EXPR, type,
11145 build_int_cst (type, 2) , arg1));
11147 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11148 sign-changing only. */
11149 if (TREE_CODE (arg1) == INTEGER_CST
11150 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11151 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11152 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11154 strict_overflow_p = false;
11155 if (TREE_CODE (arg1) == INTEGER_CST
11156 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11157 &strict_overflow_p)))
11159 if (strict_overflow_p)
11160 fold_overflow_warning (("assuming signed overflow does not "
11161 "occur when simplifying "
11162 "multiplication"),
11163 WARN_STRICT_OVERFLOW_MISC);
11164 return fold_convert_loc (loc, type, tem);
11167 /* Optimize z * conj(z) for integer complex numbers. */
11168 if (TREE_CODE (arg0) == CONJ_EXPR
11169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11170 return fold_mult_zconjz (loc, type, arg1);
11171 if (TREE_CODE (arg1) == CONJ_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11173 return fold_mult_zconjz (loc, type, arg0);
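/* z * conj(z) is the squared magnitude: (a + bi)(a - bi)
   = a*a + b*b, a purely real result; e.g. (3 + 4i)(3 - 4i) = 25.  */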
11175 else
11177 /* Maybe fold x * 0 to 0. The expressions aren't the same
11178 when x is NaN, since x * 0 is also NaN. Nor are they the
11179 same in modes with signed zeros, since multiplying a
11180 negative value by 0 gives -0, not +0. */
11181 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11182 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11183 && real_zerop (arg1))
11184 return omit_one_operand_loc (loc, type, arg1, arg0);
11185 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11186 Likewise for complex arithmetic with signed zeros. */
11187 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11188 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11189 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11190 && real_onep (arg1))
11191 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11193 /* Transform x * -1.0 into -x. */
11194 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11195 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11196 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11197 && real_minus_onep (arg1))
11198 return fold_convert_loc (loc, type, negate_expr (arg0));
11200 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11201 the result for floating-point types due to rounding, so it is applied
11202 only if -fassociative-math was specified. */
11203 if (flag_associative_math
11204 && TREE_CODE (arg0) == RDIV_EXPR
11205 && TREE_CODE (arg1) == REAL_CST
11206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11208 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11209 arg1);
11210 if (tem)
11211 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11212 TREE_OPERAND (arg0, 1));
11215 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11216 if (operand_equal_p (arg0, arg1, 0))
11218 tree tem = fold_strip_sign_ops (arg0);
11219 if (tem != NULL_TREE)
11221 tem = fold_convert_loc (loc, type, tem);
11222 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11226 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11227 This is not the same for NaNs or if signed zeros are
11228 involved. */
11229 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11230 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11231 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11232 && TREE_CODE (arg1) == COMPLEX_CST
11233 && real_zerop (TREE_REALPART (arg1)))
11235 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11236 if (real_onep (TREE_IMAGPART (arg1)))
11237 return
11238 fold_build2_loc (loc, COMPLEX_EXPR, type,
11239 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11240 rtype, arg0)),
11241 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11242 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11243 return
11244 fold_build2_loc (loc, COMPLEX_EXPR, type,
11245 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11246 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11247 rtype, arg0)));
11250 /* Optimize z * conj(z) for floating point complex numbers.
11251 Guarded by flag_unsafe_math_optimizations as non-finite
11252 imaginary components don't produce scalar results. */
11253 if (flag_unsafe_math_optimizations
11254 && TREE_CODE (arg0) == CONJ_EXPR
11255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11256 return fold_mult_zconjz (loc, type, arg1);
11257 if (flag_unsafe_math_optimizations
11258 && TREE_CODE (arg1) == CONJ_EXPR
11259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11260 return fold_mult_zconjz (loc, type, arg0);
11262 if (flag_unsafe_math_optimizations)
11264 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11265 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11267 /* Optimizations of root(...)*root(...). */
11268 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11270 tree rootfn, arg;
11271 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11272 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11274 /* Optimize sqrt(x)*sqrt(x) as x. */
11275 if (BUILTIN_SQRT_P (fcode0)
11276 && operand_equal_p (arg00, arg10, 0)
11277 && ! HONOR_SNANS (TYPE_MODE (type)))
11278 return arg00;
11280 /* Optimize root(x)*root(y) as root(x*y). */
11281 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11282 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11283 return build_call_expr_loc (loc, rootfn, 1, arg);
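/* For example, sqrt(2.0) * sqrt(8.0) folds to sqrt(16.0) = 4.0,
   and sqrt(x) * sqrt(x) above folds to just x.  */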
11286 /* Optimize expN(x)*expN(y) as expN(x+y). */
11287 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11289 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11290 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11291 CALL_EXPR_ARG (arg0, 0),
11292 CALL_EXPR_ARG (arg1, 0));
11293 return build_call_expr_loc (loc, expfn, 1, arg);
11296 /* Optimizations of pow(...)*pow(...). */
11297 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11298 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11299 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11301 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11302 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11303 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11304 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11306 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11307 if (operand_equal_p (arg01, arg11, 0))
11309 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11310 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11311 arg00, arg10);
11312 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11315 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11316 if (operand_equal_p (arg00, arg10, 0))
11318 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11319 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11320 arg01, arg11);
11321 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
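/* For example, pow(x, 2.0) * pow(z, 2.0) folds to pow(x * z, 2.0),
   and pow(x, 2.0) * pow(x, 3.0) folds to pow(x, 5.0).  */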
11325 /* Optimize tan(x)*cos(x) as sin(x). */
11326 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11327 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11328 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11329 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11330 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11331 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11332 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11333 CALL_EXPR_ARG (arg1, 0), 0))
11335 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11337 if (sinfn != NULL_TREE)
11338 return build_call_expr_loc (loc, sinfn, 1,
11339 CALL_EXPR_ARG (arg0, 0));
11342 /* Optimize x*pow(x,c) as pow(x,c+1). */
11343 if (fcode1 == BUILT_IN_POW
11344 || fcode1 == BUILT_IN_POWF
11345 || fcode1 == BUILT_IN_POWL)
11347 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11348 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11349 if (TREE_CODE (arg11) == REAL_CST
11350 && !TREE_OVERFLOW (arg11)
11351 && operand_equal_p (arg0, arg10, 0))
11353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11354 REAL_VALUE_TYPE c;
11355 tree arg;
11357 c = TREE_REAL_CST (arg11);
11358 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11359 arg = build_real (type, c);
11360 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11364 /* Optimize pow(x,c)*x as pow(x,c+1). */
11365 if (fcode0 == BUILT_IN_POW
11366 || fcode0 == BUILT_IN_POWF
11367 || fcode0 == BUILT_IN_POWL)
11369 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11370 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11371 if (TREE_CODE (arg01) == REAL_CST
11372 && !TREE_OVERFLOW (arg01)
11373 && operand_equal_p (arg1, arg00, 0))
11375 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11376 REAL_VALUE_TYPE c;
11377 tree arg;
11379 c = TREE_REAL_CST (arg01);
11380 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11381 arg = build_real (type, c);
11382 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11386 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11387 if (!in_gimple_form
11388 && optimize
11389 && operand_equal_p (arg0, arg1, 0))
11391 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11393 if (powfn)
11395 tree arg = build_real (type, dconst2);
11396 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11401 goto associate;
11403 case BIT_IOR_EXPR:
11404 bit_ior:
11405 if (integer_all_onesp (arg1))
11406 return omit_one_operand_loc (loc, type, arg1, arg0);
11407 if (integer_zerop (arg1))
11408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11409 if (operand_equal_p (arg0, arg1, 0))
11410 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11412 /* ~X | X is -1. */
11413 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11414 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11416 t1 = build_zero_cst (type);
11417 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11418 return omit_one_operand_loc (loc, type, t1, arg1);
11421 /* X | ~X is -1. */
11422 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11425 t1 = build_zero_cst (type);
11426 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11427 return omit_one_operand_loc (loc, type, t1, arg0);
11430 /* Canonicalize (X & C1) | C2. */
11431 if (TREE_CODE (arg0) == BIT_AND_EXPR
11432 && TREE_CODE (arg1) == INTEGER_CST
11433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11435 double_int c1, c2, c3, msk;
11436 int width = TYPE_PRECISION (type), w;
11438 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11439 c2 = tree_to_double_int (arg1);
11441 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11442 if ((c1 & c2) == c1)
11443 return omit_one_operand_loc (loc, type, arg1,
11444 TREE_OPERAND (arg0, 0));
11446 msk = double_int::mask (width);
11448 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11449 if (msk.and_not (c1 | c2).is_zero ())
11450 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11451 TREE_OPERAND (arg0, 0), arg1);
11453 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11454 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11455 mode which allows further optimizations. */
11456 c1 &= msk;
11457 c2 &= msk;
11458 c3 = c1.and_not (c2);
11459 for (w = BITS_PER_UNIT;
11460 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11461 w <<= 1)
11463 unsigned HOST_WIDE_INT mask
11464 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11465 if (((c1.low | c2.low) & mask) == mask
11466 && (c1.low & ~mask) == 0 && c1.high == 0)
11468 c3 = double_int::from_uhwi (mask);
11469 break;
11473 if (c3 != c1)
11474 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11475 fold_build2_loc (loc, BIT_AND_EXPR, type,
11476 TREE_OPERAND (arg0, 0),
11477 double_int_to_tree (type,
11478 c3)),
11479 arg1);
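/* For example, in an 8-bit type (X & 0x03) | 0x07 is just 0x07,
   since C2 covers C1, while (X & 0xF0) | 0x0F becomes X | 0x0F,
   since C1 and C2 together cover every bit.  */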
11482 /* (X & Y) | Y is (X, Y). */
11483 if (TREE_CODE (arg0) == BIT_AND_EXPR
11484 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11485 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11486 /* (X & Y) | X is (Y, X). */
11487 if (TREE_CODE (arg0) == BIT_AND_EXPR
11488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11489 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11490 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11491 /* X | (X & Y) is (Y, X). */
11492 if (TREE_CODE (arg1) == BIT_AND_EXPR
11493 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11494 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11495 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11496 /* X | (Y & X) is (Y, X). */
11497 if (TREE_CODE (arg1) == BIT_AND_EXPR
11498 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11499 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11500 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11502 /* (X & ~Y) | (~X & Y) is X ^ Y */
11503 if (TREE_CODE (arg0) == BIT_AND_EXPR
11504 && TREE_CODE (arg1) == BIT_AND_EXPR)
11506 tree a0, a1, l0, l1, n0, n1;
11508 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11509 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11511 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11512 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11514 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11515 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11517 if ((operand_equal_p (n0, a0, 0)
11518 && operand_equal_p (n1, a1, 0))
11519 || (operand_equal_p (n0, a1, 0)
11520 && operand_equal_p (n1, a0, 0)))
11521 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
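/* For example, with X = 0b1100 and Y = 0b1010:
   (X & ~Y) | (~X & Y) = 0b0100 | 0b0010 = 0b0110 = X ^ Y.  */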
11524 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11525 if (t1 != NULL_TREE)
11526 return t1;
11528 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11530 This results in more efficient code for machines without a NAND
11531 instruction. Combine will canonicalize to the first form
11532 which will allow use of NAND instructions provided by the
11533 backend if they exist. */
11534 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11535 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11537 return
11538 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11539 build2 (BIT_AND_EXPR, type,
11540 fold_convert_loc (loc, type,
11541 TREE_OPERAND (arg0, 0)),
11542 fold_convert_loc (loc, type,
11543 TREE_OPERAND (arg1, 0))));
11546 /* See if this can be simplified into a rotate first. If that
11547 is unsuccessful continue in the association code. */
11548 goto bit_rotate;
11550 case BIT_XOR_EXPR:
11551 if (integer_zerop (arg1))
11552 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11553 if (integer_all_onesp (arg1))
11554 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11555 if (operand_equal_p (arg0, arg1, 0))
11556 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11558 /* ~X ^ X is -1. */
11559 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11560 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11562 t1 = build_zero_cst (type);
11563 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11564 return omit_one_operand_loc (loc, type, t1, arg1);
11567 /* X ^ ~X is -1. */
11568 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11571 t1 = build_zero_cst (type);
11572 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11573 return omit_one_operand_loc (loc, type, t1, arg0);
11576 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11577 with a constant, and the two constants have no bits in common,
11578 we should treat this as a BIT_IOR_EXPR since this may produce more
11579 simplifications. */
11580 if (TREE_CODE (arg0) == BIT_AND_EXPR
11581 && TREE_CODE (arg1) == BIT_AND_EXPR
11582 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11583 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11584 && integer_zerop (const_binop (BIT_AND_EXPR,
11585 TREE_OPERAND (arg0, 1),
11586 TREE_OPERAND (arg1, 1))))
11588 code = BIT_IOR_EXPR;
11589 goto bit_ior;
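/* For example, (X & 0x0F) ^ (X & 0xF0): the two operands have no set
   bits in common, so the XOR equals (X & 0x0F) | (X & 0xF0); treating
   it as an IOR lets later folds combine the masks, here to X & 0xFF.  */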
11592 /* (X | Y) ^ X -> Y & ~X. */
11593 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11594 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11596 tree t2 = TREE_OPERAND (arg0, 1);
11597 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11598 arg1);
11599 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11600 fold_convert_loc (loc, type, t2),
11601 fold_convert_loc (loc, type, t1));
11602 return t1;
11605 /* (Y | X) ^ X -> Y & ~X. */
11606 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11607 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11609 tree t2 = TREE_OPERAND (arg0, 0);
11610 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11611 arg1);
11612 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11613 fold_convert_loc (loc, type, t2),
11614 fold_convert_loc (loc, type, t1));
11615 return t1;
11618 /* X ^ (X | Y) -> Y & ~X. */
11619 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11620 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11622 tree t2 = TREE_OPERAND (arg1, 1);
11623 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11624 arg0);
11625 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11626 fold_convert_loc (loc, type, t2),
11627 fold_convert_loc (loc, type, t1));
11628 return t1;
11631 /* X ^ (Y | X) -> Y & ~X. */
11632 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11633 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11635 tree t2 = TREE_OPERAND (arg1, 0);
11636 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11637 arg0);
11638 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11639 fold_convert_loc (loc, type, t2),
11640 fold_convert_loc (loc, type, t1));
11641 return t1;
11644 /* Convert ~X ^ ~Y to X ^ Y. */
11645 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11646 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11647 return fold_build2_loc (loc, code, type,
11648 fold_convert_loc (loc, type,
11649 TREE_OPERAND (arg0, 0)),
11650 fold_convert_loc (loc, type,
11651 TREE_OPERAND (arg1, 0)));
11653 /* Convert ~X ^ C to X ^ ~C. */
11654 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11655 && TREE_CODE (arg1) == INTEGER_CST)
11656 return fold_build2_loc (loc, code, type,
11657 fold_convert_loc (loc, type,
11658 TREE_OPERAND (arg0, 0)),
11659 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11661 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11662 if (TREE_CODE (arg0) == BIT_AND_EXPR
11663 && integer_onep (TREE_OPERAND (arg0, 1))
11664 && integer_onep (arg1))
11665 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11666 build_zero_cst (TREE_TYPE (arg0)));
11668 /* Fold (X & Y) ^ Y as ~X & Y. */
11669 if (TREE_CODE (arg0) == BIT_AND_EXPR
11670 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11672 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11673 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11674 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11675 fold_convert_loc (loc, type, arg1));
11677 /* Fold (X & Y) ^ X as ~Y & X. */
11678 if (TREE_CODE (arg0) == BIT_AND_EXPR
11679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11680 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11682 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11683 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11684 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11685 fold_convert_loc (loc, type, arg1));
11687 /* Fold X ^ (X & Y) as X & ~Y. */
11688 if (TREE_CODE (arg1) == BIT_AND_EXPR
11689 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11691 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11692 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11693 fold_convert_loc (loc, type, arg0),
11694 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11696 /* Fold X ^ (Y & X) as ~Y & X. */
11697 if (TREE_CODE (arg1) == BIT_AND_EXPR
11698 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11699 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11701 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11702 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11703 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11704 fold_convert_loc (loc, type, arg0));
11707 /* See if this can be simplified into a rotate first. If that
11708 is unsuccessful continue in the association code. */
11709 goto bit_rotate;
11711 case BIT_AND_EXPR:
11712 if (integer_all_onesp (arg1))
11713 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11714 if (integer_zerop (arg1))
11715 return omit_one_operand_loc (loc, type, arg1, arg0);
11716 if (operand_equal_p (arg0, arg1, 0))
11717 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11719 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11720 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11721 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11722 || (TREE_CODE (arg0) == EQ_EXPR
11723 && integer_zerop (TREE_OPERAND (arg0, 1))))
11724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11725 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11727 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11728 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11729 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11730 || (TREE_CODE (arg1) == EQ_EXPR
11731 && integer_zerop (TREE_OPERAND (arg1, 1))))
11732 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11735 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11736 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11737 && TREE_CODE (arg1) == INTEGER_CST
11738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11740 tree tmp1 = fold_convert_loc (loc, type, arg1);
11741 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11742 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11743 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11744 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11745 return
11746 fold_convert_loc (loc, type,
11747 fold_build2_loc (loc, BIT_IOR_EXPR,
11748 type, tmp2, tmp3));
11751 /* (X | Y) & Y is (X, Y). */
11752 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11753 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11754 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11755 /* (X | Y) & X is (Y, X). */
11756 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11758 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11759 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11760 /* X & (X | Y) is (Y, X). */
11761 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11762 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11763 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11764 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11765 /* X & (Y | X) is (Y, X). */
11766 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11768 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11769 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11771 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11772 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11773 && integer_onep (TREE_OPERAND (arg0, 1))
11774 && integer_onep (arg1))
11776 tree tem2;
11777 tem = TREE_OPERAND (arg0, 0);
11778 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11779 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11780 tem, tem2);
11781 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11782 build_zero_cst (TREE_TYPE (tem)));
11784 /* Fold ~X & 1 as (X & 1) == 0. */
11785 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11786 && integer_onep (arg1))
11788 tree tem2;
11789 tem = TREE_OPERAND (arg0, 0);
11790 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11791 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11792 tem, tem2);
11793 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11794 build_zero_cst (TREE_TYPE (tem)));
11796 /* Fold !X & 1 as X == 0. */
11797 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11798 && integer_onep (arg1))
11800 tem = TREE_OPERAND (arg0, 0);
11801 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11802 build_zero_cst (TREE_TYPE (tem)));
11805 /* Fold (X ^ Y) & Y as ~X & Y. */
11806 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11809 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11810 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11811 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11812 fold_convert_loc (loc, type, arg1));
11814 /* Fold (X ^ Y) & X as ~Y & X. */
11815 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11817 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11819 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11820 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11821 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11822 fold_convert_loc (loc, type, arg1));
11824 /* Fold X & (X ^ Y) as X & ~Y. */
11825 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11826 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11828 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11829 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11830 fold_convert_loc (loc, type, arg0),
11831 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11833 /* Fold X & (Y ^ X) as ~Y & X. */
11834 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11836 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11838 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11839 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11840 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11841 fold_convert_loc (loc, type, arg0));
11844 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11845 multiple of 1 << CST. */
11846 if (TREE_CODE (arg1) == INTEGER_CST)
11848 double_int cst1 = tree_to_double_int (arg1);
11849 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11850 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11851 if ((cst1 & ncst1) == ncst1
11852 && multiple_of_p (type, arg0,
11853 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11854 return fold_convert_loc (loc, type, arg0);
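/* For example, (X * 4) & -4 is just X * 4: X * 4 is always a
   multiple of 4, and the mask -4 only clears the two low bits,
   which are already zero.  */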
11857 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11858 bits from CST2. */
11859 if (TREE_CODE (arg1) == INTEGER_CST
11860 && TREE_CODE (arg0) == MULT_EXPR
11861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11863 double_int darg1 = tree_to_double_int (arg1);
11864 double_int masked
11865 = mask_with_tz (type, darg1,
11866 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11868 if (masked.is_zero ())
11869 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11870 arg0, arg1);
11871 else if (masked != darg1)
11873 /* Avoid the transform if arg1 is a mask of some
11874 mode which allows further optimizations. */
11875 int pop = darg1.popcount ();
11876 if (!(pop >= BITS_PER_UNIT
11877 && exact_log2 (pop) != -1
11878 && double_int::mask (pop) == darg1))
11879 return fold_build2_loc (loc, code, type, op0,
11880 double_int_to_tree (type, masked));
11884 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11885 ((A & N) + B) & M -> (A + B) & M
11886 Similarly if (N & M) == 0,
11887 ((A | N) + B) & M -> (A + B) & M
11888 and for - instead of + (or unary - instead of +)
11889 and/or ^ instead of |.
11890 If B is constant and (B & M) == 0, fold into A & M. */
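/* For example, with M = 0xFF (cst = 8) and N = 0x1FF: N & M == M,
   so ((A & 0x1FF) + B) & 0xFF becomes (A + B) & 0xFF; bits above M
   cannot influence the masked low bits of the sum, since carries
   only propagate upward.  */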
11891 if (tree_fits_uhwi_p (arg1))
11893 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11894 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11895 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11896 && (TREE_CODE (arg0) == PLUS_EXPR
11897 || TREE_CODE (arg0) == MINUS_EXPR
11898 || TREE_CODE (arg0) == NEGATE_EXPR)
11899 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11900 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11902 tree pmop[2];
11903 int which = 0;
11904 unsigned HOST_WIDE_INT cst0;
11906 /* Now we know that arg0 is (C + D) or (C - D) or
11907 -C and arg1 (M) equals (1LL << cst) - 1.
11908 Store C into PMOP[0] and D into PMOP[1]. */
11909 pmop[0] = TREE_OPERAND (arg0, 0);
11910 pmop[1] = NULL;
11911 if (TREE_CODE (arg0) != NEGATE_EXPR)
11913 pmop[1] = TREE_OPERAND (arg0, 1);
11914 which = 1;
11917 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11918 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11919 & cst1) != cst1)
11920 which = -1;
11922 for (; which >= 0; which--)
11923 switch (TREE_CODE (pmop[which]))
11925 case BIT_AND_EXPR:
11926 case BIT_IOR_EXPR:
11927 case BIT_XOR_EXPR:
11928 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11929 != INTEGER_CST)
11930 break;
11931 /* tree_to_[su]hwi not used, because we don't care about
11932 the upper bits. */
11933 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11934 cst0 &= cst1;
11935 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11937 if (cst0 != cst1)
11938 break;
11940 else if (cst0 != 0)
11941 break;
11942 /* If C or D is of the form (A & N) where
11943 (N & M) == M, or of the form (A | N) or
11944 (A ^ N) where (N & M) == 0, replace it with A. */
11945 pmop[which] = TREE_OPERAND (pmop[which], 0);
11946 break;
11947 case INTEGER_CST:
11948 /* If C or D is an N where (N & M) == 0, it can be
11949 omitted (assumed 0). */
11950 if ((TREE_CODE (arg0) == PLUS_EXPR
11951 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11952 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11953 pmop[which] = NULL;
11954 break;
11955 default:
11956 break;
11959 /* Only build anything new if we optimized one or both arguments
11960 above. */
11961 if (pmop[0] != TREE_OPERAND (arg0, 0)
11962 || (TREE_CODE (arg0) != NEGATE_EXPR
11963 && pmop[1] != TREE_OPERAND (arg0, 1)))
11965 tree utype = TREE_TYPE (arg0);
11966 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11968 /* Perform the operations in a type that has defined
11969 overflow behavior. */
11970 utype = unsigned_type_for (TREE_TYPE (arg0));
11971 if (pmop[0] != NULL)
11972 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11973 if (pmop[1] != NULL)
11974 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11977 if (TREE_CODE (arg0) == NEGATE_EXPR)
11978 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11979 else if (TREE_CODE (arg0) == PLUS_EXPR)
11981 if (pmop[0] != NULL && pmop[1] != NULL)
11982 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11983 pmop[0], pmop[1]);
11984 else if (pmop[0] != NULL)
11985 tem = pmop[0];
11986 else if (pmop[1] != NULL)
11987 tem = pmop[1];
11988 else
11989 return build_int_cst (type, 0);
11991 else if (pmop[0] == NULL)
11992 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11993 else
11994 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11995 pmop[0], pmop[1]);
11996 /* TEM is now the new binary +, - or unary - replacement. */
11997 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11998 fold_convert_loc (loc, utype, arg1));
11999 return fold_convert_loc (loc, type, tem);
12004 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12005 if (t1 != NULL_TREE)
12006 return t1;
12007 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12008 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12009 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12011 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12013 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12014 && (~TREE_INT_CST_LOW (arg1)
12015 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12016 return
12017 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12020 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12022 This results in more efficient code for machines without a NOR
12023 instruction. Combine will canonicalize to the first form
12024 which will allow use of NOR instructions provided by the
12025 backend if they exist. */
12026 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12027 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12029 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12030 build2 (BIT_IOR_EXPR, type,
12031 fold_convert_loc (loc, type,
12032 TREE_OPERAND (arg0, 0)),
12033 fold_convert_loc (loc, type,
12034 TREE_OPERAND (arg1, 0))));
12037 /* If arg0 is derived from the address of an object or function, we may
12038 be able to fold this expression using the object or function's
12039 alignment. */
12040 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12042 unsigned HOST_WIDE_INT modulus, residue;
12043 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12045 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12046 integer_onep (arg1));
12048 /* This works because modulus is a power of 2. If this weren't the
12049 case, we'd have to replace it by its greatest power-of-2
12050 divisor: modulus & -modulus. */
12051 if (low < modulus)
12052 return build_int_cst (type, residue & low);
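/* For example, if arg0 is based on the address of an object with
   16-byte alignment, modulus is 16 and residue is 0, so masking
   the address with 7 or 15 folds to the constant 0.  */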
12055 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12056 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12057 if the new mask might be further optimized. */
12058 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12059 || TREE_CODE (arg0) == RSHIFT_EXPR)
12060 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12061 && TREE_CODE (arg1) == INTEGER_CST
12062 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12063 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12064 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12065 < TYPE_PRECISION (TREE_TYPE (arg0))))
12067 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12068 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12069 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12070 tree shift_type = TREE_TYPE (arg0);
12072 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12073 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12074 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12075 && TYPE_PRECISION (TREE_TYPE (arg0))
12076 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12078 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12079 tree arg00 = TREE_OPERAND (arg0, 0);
12080 /* See if more bits can be proven as zero because of
12081 zero extension. */
12082 if (TREE_CODE (arg00) == NOP_EXPR
12083 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12085 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12086 if (TYPE_PRECISION (inner_type)
12087 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12088 && TYPE_PRECISION (inner_type) < prec)
12090 prec = TYPE_PRECISION (inner_type);
12091 /* See if we can shorten the right shift. */
12092 if (shiftc < prec)
12093 shift_type = inner_type;
12094 /* Otherwise X >> C1 is all zeros, so we'll optimize
12095 it into (X, 0) later on by making sure zerobits
12096 is all ones. */
12099 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12100 if (shiftc < prec)
12102 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12103 zerobits <<= prec - shiftc;
12105 /* For an arithmetic shift, if the sign bit could be set, zerobits
12106 can actually contain sign bits, so no transformation is
12107 possible unless MASK masks them all away. In that
12108 case the shift needs to be converted into a logical shift. */
12109 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12110 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12112 if ((mask & zerobits) == 0)
12113 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12114 else
12115 zerobits = 0;
12119 /* ((X << 16) & 0xff00) is (X, 0). */
12120 if ((mask & zerobits) == mask)
12121 return omit_one_operand_loc (loc, type,
12122 build_int_cst (type, 0), arg0);
12124 newmask = mask | zerobits;
12125 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12127 /* Only do the transformation if NEWMASK is some integer
12128 mode's mask. */
12129 for (prec = BITS_PER_UNIT;
12130 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12131 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12132 break;
12133 if (prec < HOST_BITS_PER_WIDE_INT
12134 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12136 tree newmaskt;
12138 if (shift_type != TREE_TYPE (arg0))
12140 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12141 fold_convert_loc (loc, shift_type,
12142 TREE_OPERAND (arg0, 0)),
12143 TREE_OPERAND (arg0, 1));
12144 tem = fold_convert_loc (loc, type, tem);
12146 else
12147 tem = op0;
12148 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12149 if (!tree_int_cst_equal (newmaskt, arg1))
12150 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12155 goto associate;
12157 case RDIV_EXPR:
12158 /* Don't touch a floating-point divide by zero unless the mode
12159 of the constant can represent infinity. */
12160 if (TREE_CODE (arg1) == REAL_CST
12161 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12162 && real_zerop (arg1))
12163 return NULL_TREE;
12165 /* Optimize A / A to 1.0 if we don't care about
12166 NaNs or Infinities. Skip the transformation
12167 for non-real operands. */
12168 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12169 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12170 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12171 && operand_equal_p (arg0, arg1, 0))
12173 tree r = build_real (TREE_TYPE (arg0), dconst1);
12175 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12178 /* The complex version of the above A / A optimization. */
12179 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12180 && operand_equal_p (arg0, arg1, 0))
12182 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12183 if (! HONOR_NANS (TYPE_MODE (elem_type))
12184 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12186 tree r = build_real (elem_type, dconst1);
12187 /* omit_two_operands will call fold_convert for us. */
12188 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12192 /* (-A) / (-B) -> A / B */
12193 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12194 return fold_build2_loc (loc, RDIV_EXPR, type,
12195 TREE_OPERAND (arg0, 0),
12196 negate_expr (arg1));
12197 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12198 return fold_build2_loc (loc, RDIV_EXPR, type,
12199 negate_expr (arg0),
12200 TREE_OPERAND (arg1, 0));
12202 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12203 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12204 && real_onep (arg1))
12205 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12207 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12208 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12209 && real_minus_onep (arg1))
12210 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12211 negate_expr (arg0)));
12213 /* If ARG1 is a constant, we can convert this to a multiply by the
12214 reciprocal. This does not have the same rounding properties,
12215 so only do this if -freciprocal-math is set. We can actually
12216 always safely do it if ARG1 is a power of two, but it's hard to
12217 tell if it is or not in a portable manner. */
12218 if (optimize
12219 && (TREE_CODE (arg1) == REAL_CST
12220 || (TREE_CODE (arg1) == COMPLEX_CST
12221 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12222 || (TREE_CODE (arg1) == VECTOR_CST
12223 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12225 if (flag_reciprocal_math
12226 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12227 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12228 /* Find the reciprocal if optimizing and the result is exact.
12229 TODO: Complex reciprocal not implemented. */
12230 if (TREE_CODE (arg1) != COMPLEX_CST)
12232 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12234 if (inverse)
12235 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
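/* For example, x / 4.0 becomes x * 0.25 whenever we optimize, since
   0.25 is exact, while x / 10.0 becomes x * 0.1 only under
   -freciprocal-math, because 0.1 rounds.  */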
12238 /* Convert A/B/C to A/(B*C). */
12239 if (flag_reciprocal_math
12240 && TREE_CODE (arg0) == RDIV_EXPR)
12241 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12242 fold_build2_loc (loc, MULT_EXPR, type,
12243 TREE_OPERAND (arg0, 1), arg1));
12245 /* Convert A/(B/C) to (A/B)*C. */
12246 if (flag_reciprocal_math
12247 && TREE_CODE (arg1) == RDIV_EXPR)
12248 return fold_build2_loc (loc, MULT_EXPR, type,
12249 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12250 TREE_OPERAND (arg1, 0)),
12251 TREE_OPERAND (arg1, 1));
12253 /* Convert C1/(X*C2) into (C1/C2)/X. */
12254 if (flag_reciprocal_math
12255 && TREE_CODE (arg1) == MULT_EXPR
12256 && TREE_CODE (arg0) == REAL_CST
12257 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12259 tree tem = const_binop (RDIV_EXPR, arg0,
12260 TREE_OPERAND (arg1, 1));
12261 if (tem)
12262 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12263 TREE_OPERAND (arg1, 0));
12266 if (flag_unsafe_math_optimizations)
12268 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12269 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12271 /* Optimize sin(x)/cos(x) as tan(x). */
12272 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12273 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12274 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12275 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12276 CALL_EXPR_ARG (arg1, 0), 0))
12278 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12280 if (tanfn != NULL_TREE)
12281 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
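/* For instance, the source expression
     double t = sin (x) / cos (x);
   is folded as if written
     double t = tan (x);
   and likewise for the float and long double variants.  */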
12284 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12285 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12286 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12287 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12288 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12289 CALL_EXPR_ARG (arg1, 0), 0))
12291 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12293 if (tanfn != NULL_TREE)
12295 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12296 CALL_EXPR_ARG (arg0, 0));
12297 return fold_build2_loc (loc, RDIV_EXPR, type,
12298 build_real (type, dconst1), tmp);
12302 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12303 NaNs or Infinities. */
12304 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12305 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12306 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12308 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12309 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12311 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12312 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12313 && operand_equal_p (arg00, arg01, 0))
12315 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12317 if (cosfn != NULL_TREE)
12318 return build_call_expr_loc (loc, cosfn, 1, arg00);
12322 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12323 NaNs or Infinities. */
12324 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12325 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12326 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12328 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12329 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12331 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12332 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12333 && operand_equal_p (arg00, arg01, 0))
12335 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12337 if (cosfn != NULL_TREE)
12339 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12340 return fold_build2_loc (loc, RDIV_EXPR, type,
12341 build_real (type, dconst1),
12342 tmp);
12347 /* Optimize pow(x,c)/x as pow(x,c-1). */
12348 if (fcode0 == BUILT_IN_POW
12349 || fcode0 == BUILT_IN_POWF
12350 || fcode0 == BUILT_IN_POWL)
12352 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12353 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12354 if (TREE_CODE (arg01) == REAL_CST
12355 && !TREE_OVERFLOW (arg01)
12356 && operand_equal_p (arg1, arg00, 0))
12358 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12359 REAL_VALUE_TYPE c;
12360 tree arg;
12362 c = TREE_REAL_CST (arg01);
12363 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12364 arg = build_real (type, c);
12365 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
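/* e.g. pow (x, 3.5) / x folds to pow (x, 2.5).  */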
12369 /* Optimize a/root(b/c) into a*root(c/b). */
12370 if (BUILTIN_ROOT_P (fcode1))
12372 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12374 if (TREE_CODE (rootarg) == RDIV_EXPR)
12376 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12377 tree b = TREE_OPERAND (rootarg, 0);
12378 tree c = TREE_OPERAND (rootarg, 1);
12380 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12382 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12383 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12387 /* Optimize x/expN(y) into x*expN(-y). */
12388 if (BUILTIN_EXPONENT_P (fcode1))
12390 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12391 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12392 arg1 = build_call_expr_loc (loc,
12393 expfn, 1,
12394 fold_convert_loc (loc, type, arg));
12395 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12398 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12399 if (fcode1 == BUILT_IN_POW
12400 || fcode1 == BUILT_IN_POWF
12401 || fcode1 == BUILT_IN_POWL)
12403 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12404 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12405 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12406 tree neg11 = fold_convert_loc (loc, type,
12407 negate_expr (arg11));
12408 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12409 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12412 return NULL_TREE;
12414 case TRUNC_DIV_EXPR:
12415 /* Optimize (X & (-A)) / A where A is a power of 2,
12416 to X >> log2(A). */
12417 if (TREE_CODE (arg0) == BIT_AND_EXPR
12418 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12419 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12421 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12422 arg1, TREE_OPERAND (arg0, 1));
12423 if (sum && integer_zerop (sum)) {
12424 unsigned long pow2;
12426 if (TREE_INT_CST_LOW (arg1))
12427 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12428 else
12429 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12430 + HOST_BITS_PER_WIDE_INT;
12432 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12433 TREE_OPERAND (arg0, 0),
12434 build_int_cst (integer_type_node, pow2));
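/* For example, for signed X:
     (x & -8) / 8  ->  x >> 3
   The mask guarantees the low bits are zero, so the truncating
   division is an exact arithmetic shift.  */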
12438 /* Fall through */
12440 case FLOOR_DIV_EXPR:
12441 /* Simplify A / (B << N) where A and B are positive and B is
12442 a power of 2, to A >> (N + log2(B)). */
12443 strict_overflow_p = false;
12444 if (TREE_CODE (arg1) == LSHIFT_EXPR
12445 && (TYPE_UNSIGNED (type)
12446 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12448 tree sval = TREE_OPERAND (arg1, 0);
12449 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12451 tree sh_cnt = TREE_OPERAND (arg1, 1);
12452 unsigned long pow2;
12454 if (TREE_INT_CST_LOW (sval))
12455 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12456 else
12457 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12458 + HOST_BITS_PER_WIDE_INT;
12460 if (strict_overflow_p)
12461 fold_overflow_warning (("assuming signed overflow does not "
12462 "occur when simplifying A / (B << N)"),
12463 WARN_STRICT_OVERFLOW_MISC);
12465 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12466 sh_cnt,
12467 build_int_cst (TREE_TYPE (sh_cnt),
12468 pow2));
12469 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12470 fold_convert_loc (loc, type, arg0), sh_cnt);
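/* e.g. for unsigned A:
     a / (4 << n)  ->  a >> (n + 2)
   since 4 << n is 1 << (n + 2).  */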
12474 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12475 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12476 if (INTEGRAL_TYPE_P (type)
12477 && TYPE_UNSIGNED (type)
12478 && code == FLOOR_DIV_EXPR)
12479 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12481 /* Fall through */
12483 case ROUND_DIV_EXPR:
12484 case CEIL_DIV_EXPR:
12485 case EXACT_DIV_EXPR:
12486 if (integer_onep (arg1))
12487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12488 if (integer_zerop (arg1))
12489 return NULL_TREE;
12490 /* X / -1 is -X. */
12491 if (!TYPE_UNSIGNED (type)
12492 && TREE_CODE (arg1) == INTEGER_CST
12493 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12494 && TREE_INT_CST_HIGH (arg1) == -1)
12495 return fold_convert_loc (loc, type, negate_expr (arg0));
12497 /* Convert -A / -B to A / B when the type is signed and overflow is
12498 undefined. */
12499 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12500 && TREE_CODE (arg0) == NEGATE_EXPR
12501 && negate_expr_p (arg1))
12503 if (INTEGRAL_TYPE_P (type))
12504 fold_overflow_warning (("assuming signed overflow does not occur "
12505 "when distributing negation across "
12506 "division"),
12507 WARN_STRICT_OVERFLOW_MISC);
12508 return fold_build2_loc (loc, code, type,
12509 fold_convert_loc (loc, type,
12510 TREE_OPERAND (arg0, 0)),
12511 fold_convert_loc (loc, type,
12512 negate_expr (arg1)));
12514 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12515 && TREE_CODE (arg1) == NEGATE_EXPR
12516 && negate_expr_p (arg0))
12518 if (INTEGRAL_TYPE_P (type))
12519 fold_overflow_warning (("assuming signed overflow does not occur "
12520 "when distributing negation across "
12521 "division"),
12522 WARN_STRICT_OVERFLOW_MISC);
12523 return fold_build2_loc (loc, code, type,
12524 fold_convert_loc (loc, type,
12525 negate_expr (arg0)),
12526 fold_convert_loc (loc, type,
12527 TREE_OPERAND (arg1, 0)));
12530 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12531 operation, EXACT_DIV_EXPR.
12533 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12534 At one time others generated faster code; it's not clear whether they
12535 still do after the last round of changes to the DIV code in expmed.c. */
12536 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12537 && multiple_of_p (type, arg0, arg1))
12538 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12540 strict_overflow_p = false;
12541 if (TREE_CODE (arg1) == INTEGER_CST
12542 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12543 &strict_overflow_p)))
12545 if (strict_overflow_p)
12546 fold_overflow_warning (("assuming signed overflow does not occur "
12547 "when simplifying division"),
12548 WARN_STRICT_OVERFLOW_MISC);
12549 return fold_convert_loc (loc, type, tem);
12552 return NULL_TREE;
12554 case CEIL_MOD_EXPR:
12555 case FLOOR_MOD_EXPR:
12556 case ROUND_MOD_EXPR:
12557 case TRUNC_MOD_EXPR:
12558 /* X % 1 is always zero, but be sure to preserve any side
12559 effects in X. */
12560 if (integer_onep (arg1))
12561 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12563 /* For X % 0, return the expression unchanged so that we get the
12564 proper warnings and errors. */
12565 if (integer_zerop (arg1))
12566 return NULL_TREE;
12568 /* 0 % X is always zero, but be sure to preserve any side
12569 effects in X. Place this after checking for X == 0. */
12570 if (integer_zerop (arg0))
12571 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12573 /* X % -1 is zero. */
12574 if (!TYPE_UNSIGNED (type)
12575 && TREE_CODE (arg1) == INTEGER_CST
12576 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12577 && TREE_INT_CST_HIGH (arg1) == -1)
12578 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12580 /* X % -C is the same as X % C. */
12581 if (code == TRUNC_MOD_EXPR
12582 && !TYPE_UNSIGNED (type)
12583 && TREE_CODE (arg1) == INTEGER_CST
12584 && !TREE_OVERFLOW (arg1)
12585 && TREE_INT_CST_HIGH (arg1) < 0
12586 && !TYPE_OVERFLOW_TRAPS (type)
12587 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12588 && !sign_bit_p (arg1, arg1))
12589 return fold_build2_loc (loc, code, type,
12590 fold_convert_loc (loc, type, arg0),
12591 fold_convert_loc (loc, type,
12592 negate_expr (arg1)));
12594 /* X % -Y is the same as X % Y. */
12595 if (code == TRUNC_MOD_EXPR
12596 && !TYPE_UNSIGNED (type)
12597 && TREE_CODE (arg1) == NEGATE_EXPR
12598 && !TYPE_OVERFLOW_TRAPS (type))
12599 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12600 fold_convert_loc (loc, type,
12601 TREE_OPERAND (arg1, 0)));
12603 strict_overflow_p = false;
12604 if (TREE_CODE (arg1) == INTEGER_CST
12605 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12606 &strict_overflow_p)))
12608 if (strict_overflow_p)
12609 fold_overflow_warning (("assuming signed overflow does not occur "
12610 "when simplifying modulus"),
12611 WARN_STRICT_OVERFLOW_MISC);
12612 return fold_convert_loc (loc, type, tem);
12615 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12616 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12617 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12618 && (TYPE_UNSIGNED (type)
12619 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12621 tree c = arg1;
12622 /* Also optimize A % (C << N) where C is a power of 2,
12623 to A & ((C << N) - 1). */
12624 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12625 c = TREE_OPERAND (arg1, 0);
12627 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12629 tree mask
12630 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12631 build_int_cst (TREE_TYPE (arg1), 1));
12632 if (strict_overflow_p)
12633 fold_overflow_warning (("assuming signed overflow does not "
12634 "occur when simplifying "
12635 "X % (power of two)"),
12636 WARN_STRICT_OVERFLOW_MISC);
12637 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12638 fold_convert_loc (loc, type, arg0),
12639 fold_convert_loc (loc, type, mask));
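/* For example, for unsigned X:
     x % 8         ->  x & 7
     x % (4 << n)  ->  x & ((4 << n) - 1)  */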
12643 return NULL_TREE;
12645 case LROTATE_EXPR:
12646 case RROTATE_EXPR:
12647 if (integer_all_onesp (arg0))
12648 return omit_one_operand_loc (loc, type, arg0, arg1);
12649 goto shift;
12651 case RSHIFT_EXPR:
12652 /* Optimize -1 >> x for arithmetic right shifts. */
12653 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12654 && tree_expr_nonnegative_p (arg1))
12655 return omit_one_operand_loc (loc, type, arg0, arg1);
12656 /* ... fall through ... */
12658 case LSHIFT_EXPR:
12659 shift:
12660 if (integer_zerop (arg1))
12661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12662 if (integer_zerop (arg0))
12663 return omit_one_operand_loc (loc, type, arg0, arg1);
12665 /* Prefer vector1 << scalar to vector1 << vector2
12666 if vector2 is uniform. */
12667 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12668 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12669 return fold_build2_loc (loc, code, type, op0, tem);
12671 /* Since a negative shift count is not well defined,
12672 don't try to compute it in the compiler. */
12673 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12674 return NULL_TREE;
12676 prec = element_precision (type);
12678 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12679 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12680 && tree_to_uhwi (arg1) < prec
12681 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12682 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12684 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12685 + tree_to_uhwi (arg1));
12687 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12688 being well defined. */
12689 if (low >= prec)
12691 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12692 low = low % prec;
12693 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12694 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12695 TREE_OPERAND (arg0, 0));
12696 else
12697 low = prec - 1;
12700 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12701 build_int_cst (TREE_TYPE (arg1), low));
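/* e.g. (x << 3) << 5 folds to x << 8.  If the combined count
   reaches the precision, the result folds to zero for left or
   unsigned shifts, the count saturates at prec - 1 for signed
   right shifts, and rotates wrap modulo prec.  */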
12704 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12705 into x & ((unsigned)-1 >> c) for unsigned types. */
12706 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12707 || (TYPE_UNSIGNED (type)
12708 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12709 && tree_fits_uhwi_p (arg1)
12710 && tree_to_uhwi (arg1) < prec
12711 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12712 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12714 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12715 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12716 tree lshift;
12717 tree arg00;
12719 if (low0 == low1)
12721 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12723 lshift = build_minus_one_cst (type);
12724 lshift = const_binop (code, lshift, arg1);
12726 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12730 /* Rewrite an LROTATE_EXPR by a constant into an
12731 RROTATE_EXPR by a new constant. */
12732 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12734 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12735 tem = const_binop (MINUS_EXPR, tem, arg1);
12736 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
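/* e.g. for a 32-bit type, rotate-left by 8 becomes
   rotate-right by 24.  */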
12739 /* If we have a rotate of a bit operation with the rotate count and
12740 the second operand of the bit operation both constant,
12741 permute the two operations. */
12742 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12743 && (TREE_CODE (arg0) == BIT_AND_EXPR
12744 || TREE_CODE (arg0) == BIT_IOR_EXPR
12745 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12747 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12748 fold_build2_loc (loc, code, type,
12749 TREE_OPERAND (arg0, 0), arg1),
12750 fold_build2_loc (loc, code, type,
12751 TREE_OPERAND (arg0, 1), arg1));
12753 /* Two consecutive rotates adding up to the precision of the
12754 type can be ignored. */
12755 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12756 && TREE_CODE (arg0) == RROTATE_EXPR
12757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12758 && TREE_INT_CST_HIGH (arg1) == 0
12759 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12760 && ((TREE_INT_CST_LOW (arg1)
12761 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12762 == prec))
12763 return TREE_OPERAND (arg0, 0);
12765 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12766 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12767 if the latter can be further optimized. */
12768 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12769 && TREE_CODE (arg0) == BIT_AND_EXPR
12770 && TREE_CODE (arg1) == INTEGER_CST
12771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12773 tree mask = fold_build2_loc (loc, code, type,
12774 fold_convert_loc (loc, type,
12775 TREE_OPERAND (arg0, 1)),
12776 arg1);
12777 tree shift = fold_build2_loc (loc, code, type,
12778 fold_convert_loc (loc, type,
12779 TREE_OPERAND (arg0, 0)),
12780 arg1);
12781 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12782 if (tem)
12783 return tem;
12786 return NULL_TREE;
12788 case MIN_EXPR:
12789 if (operand_equal_p (arg0, arg1, 0))
12790 return omit_one_operand_loc (loc, type, arg0, arg1);
12791 if (INTEGRAL_TYPE_P (type)
12792 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12793 return omit_one_operand_loc (loc, type, arg1, arg0);
12794 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12795 if (tem)
12796 return tem;
12797 goto associate;
12799 case MAX_EXPR:
12800 if (operand_equal_p (arg0, arg1, 0))
12801 return omit_one_operand_loc (loc, type, arg0, arg1);
12802 if (INTEGRAL_TYPE_P (type)
12803 && TYPE_MAX_VALUE (type)
12804 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12805 return omit_one_operand_loc (loc, type, arg1, arg0);
12806 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12807 if (tem)
12808 return tem;
12809 goto associate;
12811 case TRUTH_ANDIF_EXPR:
12812 /* Note that the operands of this must be ints
12813 and their values must be 0 or 1.
12814 ("true" is a fixed value perhaps depending on the language.) */
12815 /* If first arg is constant zero, return it. */
12816 if (integer_zerop (arg0))
12817 return fold_convert_loc (loc, type, arg0);
12818 case TRUTH_AND_EXPR:
12819 /* If either arg is constant true, drop it. */
12820 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12821 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12822 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12823 /* Preserve sequence points. */
12824 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12825 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12826 /* If second arg is constant zero, result is zero, but first arg
12827 must be evaluated. */
12828 if (integer_zerop (arg1))
12829 return omit_one_operand_loc (loc, type, arg1, arg0);
12830 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12831 case will be handled here. */
12832 if (integer_zerop (arg0))
12833 return omit_one_operand_loc (loc, type, arg0, arg1);
12835 /* !X && X is always false. */
12836 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12838 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12839 /* X && !X is always false. */
12840 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12842 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12844 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12845 means A >= Y && A != MAX, but in this case we know that
12846 A < X <= MAX. */
12848 if (!TREE_SIDE_EFFECTS (arg0)
12849 && !TREE_SIDE_EFFECTS (arg1))
12851 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12852 if (tem && !operand_equal_p (tem, arg0, 0))
12853 return fold_build2_loc (loc, code, type, tem, arg1);
12855 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12856 if (tem && !operand_equal_p (tem, arg1, 0))
12857 return fold_build2_loc (loc, code, type, arg0, tem);
12860 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12861 != NULL_TREE)
12862 return tem;
12864 return NULL_TREE;
12866 case TRUTH_ORIF_EXPR:
12867 /* Note that the operands of this must be ints
12868 and their values must be 0 or 1.
12869 ("true" is a fixed value perhaps depending on the language.) */
12870 /* If first arg is constant true, return it. */
12871 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12872 return fold_convert_loc (loc, type, arg0);
12873 case TRUTH_OR_EXPR:
12874 /* If either arg is constant zero, drop it. */
12875 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12877 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12878 /* Preserve sequence points. */
12879 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12880 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12881 /* If second arg is constant true, result is true, but we must
12882 evaluate first arg. */
12883 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12884 return omit_one_operand_loc (loc, type, arg1, arg0);
12885 /* Likewise for first arg, but note this only occurs here for
12886 TRUTH_OR_EXPR. */
12887 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12888 return omit_one_operand_loc (loc, type, arg0, arg1);
12890 /* !X || X is always true. */
12891 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12892 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12893 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12894 /* X || !X is always true. */
12895 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12896 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12897 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12899 /* (X && !Y) || (!X && Y) is X ^ Y */
12900 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12901 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12903 tree a0, a1, l0, l1, n0, n1;
12905 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12906 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12908 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12909 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12911 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12912 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12914 if ((operand_equal_p (n0, a0, 0)
12915 && operand_equal_p (n1, a1, 0))
12916 || (operand_equal_p (n0, a1, 0)
12917 && operand_equal_p (n1, a0, 0)))
12918 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
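/* For instance, (a && !b) || (!a && b) folds to the single
   TRUTH_XOR_EXPR a ^ b.  */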
12921 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12922 != NULL_TREE)
12923 return tem;
12925 return NULL_TREE;
12927 case TRUTH_XOR_EXPR:
12928 /* If the second arg is constant zero, drop it. */
12929 if (integer_zerop (arg1))
12930 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12931 /* If the second arg is constant true, this is a logical inversion. */
12932 if (integer_onep (arg1))
12934 tem = invert_truthvalue_loc (loc, arg0);
12935 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12937 /* Identical arguments cancel to zero. */
12938 if (operand_equal_p (arg0, arg1, 0))
12939 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12941 /* !X ^ X is always true. */
12942 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12943 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12944 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12946 /* X ^ !X is always true. */
12947 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12949 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12951 return NULL_TREE;
12953 case EQ_EXPR:
12954 case NE_EXPR:
12955 STRIP_NOPS (arg0);
12956 STRIP_NOPS (arg1);
12958 tem = fold_comparison (loc, code, type, op0, op1);
12959 if (tem != NULL_TREE)
12960 return tem;
12962 /* bool_var != 0 becomes bool_var. */
12963 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12964 && code == NE_EXPR)
12965 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12967 /* bool_var == 1 becomes bool_var. */
12968 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12969 && code == EQ_EXPR)
12970 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12972 /* bool_var != 1 becomes !bool_var. */
12973 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12974 && code == NE_EXPR)
12975 return fold_convert_loc (loc, type,
12976 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12977 TREE_TYPE (arg0), arg0));
12979 /* bool_var == 0 becomes !bool_var. */
12980 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12981 && code == EQ_EXPR)
12982 return fold_convert_loc (loc, type,
12983 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12984 TREE_TYPE (arg0), arg0));
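/* In summary, for a boolean B:
     b != 0 and b == 1 fold to b itself, while
     b == 0 and b != 1 fold to !b.  */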
12986 /* !exp != 0 becomes !exp */
12987 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12988 && code == NE_EXPR)
12989 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12991 /* If this is an equality comparison of the address of two non-weak,
12992 unaliased symbols, neither of which is extern (since we do not
12993 have access to attributes for externs), then we know the result. */
12994 if (TREE_CODE (arg0) == ADDR_EXPR
12995 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12996 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12997 && ! lookup_attribute ("alias",
12998 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12999 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
13000 && TREE_CODE (arg1) == ADDR_EXPR
13001 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13002 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13003 && ! lookup_attribute ("alias",
13004 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13005 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13007 /* We know that we're looking at the address of two
13008 non-weak, unaliased, static _DECL nodes.
13010 It is both wasteful and incorrect to call operand_equal_p
13011 to compare the two ADDR_EXPR nodes. It is wasteful in that
13012 all we need to do is test pointer equality for the arguments
13013 to the two ADDR_EXPR nodes. It is incorrect to use
13014 operand_equal_p as that function is NOT equivalent to a
13015 C equality test. It can in fact return false for two
13016 objects which would test as equal using the C equality
13017 operator. */
13018 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13019 return constant_boolean_node (equal
13020 ? code == EQ_EXPR : code != EQ_EXPR,
13021 type);
13024 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13025 a MINUS_EXPR of a constant, we can convert it into a comparison with
13026 a revised constant as long as no overflow occurs. */
13027 if (TREE_CODE (arg1) == INTEGER_CST
13028 && (TREE_CODE (arg0) == PLUS_EXPR
13029 || TREE_CODE (arg0) == MINUS_EXPR)
13030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13031 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13032 ? MINUS_EXPR : PLUS_EXPR,
13033 fold_convert_loc (loc, TREE_TYPE (arg0),
13034 arg1),
13035 TREE_OPERAND (arg0, 1)))
13036 && !TREE_OVERFLOW (tem))
13037 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13039 /* Similarly for a NEGATE_EXPR. */
13040 if (TREE_CODE (arg0) == NEGATE_EXPR
13041 && TREE_CODE (arg1) == INTEGER_CST
13042 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13043 arg1)))
13044 && TREE_CODE (tem) == INTEGER_CST
13045 && !TREE_OVERFLOW (tem))
13046 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13048 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13049 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13050 && TREE_CODE (arg1) == INTEGER_CST
13051 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13052 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13053 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13054 fold_convert_loc (loc,
13055 TREE_TYPE (arg0),
13056 arg1),
13057 TREE_OPERAND (arg0, 1)));
13059 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13060 if ((TREE_CODE (arg0) == PLUS_EXPR
13061 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13062 || TREE_CODE (arg0) == MINUS_EXPR)
13063 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13064 0)),
13065 arg1, 0)
13066 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13067 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13069 tree val = TREE_OPERAND (arg0, 1);
13070 return omit_two_operands_loc (loc, type,
13071 fold_build2_loc (loc, code, type,
13072 val,
13073 build_int_cst (TREE_TYPE (val),
13074 0)),
13075 TREE_OPERAND (arg0, 0), arg1);
13078 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13079 if (TREE_CODE (arg0) == MINUS_EXPR
13080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13081 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13082 1)),
13083 arg1, 0)
13084 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13086 return omit_two_operands_loc (loc, type,
13087 code == NE_EXPR
13088 ? boolean_true_node : boolean_false_node,
13089 TREE_OPERAND (arg0, 1), arg1);
13092 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13093 for !=. Don't do this for ordered comparisons due to overflow. */
13094 if (TREE_CODE (arg0) == MINUS_EXPR
13095 && integer_zerop (arg1))
13096 return fold_build2_loc (loc, code, type,
13097 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13099 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13100 if (TREE_CODE (arg0) == ABS_EXPR
13101 && (integer_zerop (arg1) || real_zerop (arg1)))
13102 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13104 /* If this is an EQ or NE comparison with zero and ARG0 is
13105 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13106 two operations, but the latter can be done in one less insn
13107 on machines that have only two-operand insns or on which a
13108 constant cannot be the first operand. */
13109 if (TREE_CODE (arg0) == BIT_AND_EXPR
13110 && integer_zerop (arg1))
13112 tree arg00 = TREE_OPERAND (arg0, 0);
13113 tree arg01 = TREE_OPERAND (arg0, 1);
13114 if (TREE_CODE (arg00) == LSHIFT_EXPR
13115 && integer_onep (TREE_OPERAND (arg00, 0)))
13117 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13118 arg01, TREE_OPERAND (arg00, 1));
13119 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13120 build_int_cst (TREE_TYPE (arg0), 1));
13121 return fold_build2_loc (loc, code, type,
13122 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13123 arg1);
13125 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13126 && integer_onep (TREE_OPERAND (arg01, 0)))
13128 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13129 arg00, TREE_OPERAND (arg01, 1));
13130 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13131 build_int_cst (TREE_TYPE (arg0), 1));
13132 return fold_build2_loc (loc, code, type,
13133 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13134 arg1);
13138 /* If this is an NE or EQ comparison of zero against the result of a
13139 signed MOD operation whose second operand is a power of 2, make
13140 the MOD operation unsigned since it is simpler and equivalent. */
13141 if (integer_zerop (arg1)
13142 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13143 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13144 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13145 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13146 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13147 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13149 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13150 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13151 fold_convert_loc (loc, newtype,
13152 TREE_OPERAND (arg0, 0)),
13153 fold_convert_loc (loc, newtype,
13154 TREE_OPERAND (arg0, 1)));
13156 return fold_build2_loc (loc, code, type, newmod,
13157 fold_convert_loc (loc, newtype, arg1));
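/* e.g. for signed int X, x % 4 == 0 becomes
   (unsigned) x % 4 == 0, which the power-of-two MOD folding
   above can then reduce to ((unsigned) x & 3) == 0.  */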
13160 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13161 C1 is a valid shift constant, and C2 is a power of two, i.e.
13162 a single bit. */
13163 if (TREE_CODE (arg0) == BIT_AND_EXPR
13164 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13165 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13166 == INTEGER_CST
13167 && integer_pow2p (TREE_OPERAND (arg0, 1))
13168 && integer_zerop (arg1))
13170 tree itype = TREE_TYPE (arg0);
13171 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13172 prec = TYPE_PRECISION (itype);
13174 /* Check for a valid shift count. */
13175 if (TREE_INT_CST_HIGH (arg001) == 0
13176 && TREE_INT_CST_LOW (arg001) < prec)
13178 tree arg01 = TREE_OPERAND (arg0, 1);
13179 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13180 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13181 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13182 can be rewritten as (X & (C2 << C1)) != 0. */
13183 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13185 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13186 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13187 return fold_build2_loc (loc, code, type, tem,
13188 fold_convert_loc (loc, itype, arg1));
13190 /* Otherwise, for signed (arithmetic) shifts,
13191 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13192 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13193 else if (!TYPE_UNSIGNED (itype))
13194 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13195 arg000, build_int_cst (itype, 0));
13196 /* Otherwise, for unsigned (logical) shifts,
13197 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13198 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13199 else
13200 return omit_one_operand_loc (loc, type,
13201 code == EQ_EXPR ? integer_one_node
13202 : integer_zero_node,
13203 arg000);
13207 /* If we have (A & C) == C where C is a power of 2, convert this into
13208 (A & C) != 0. Similarly for NE_EXPR. */
13209 if (TREE_CODE (arg0) == BIT_AND_EXPR
13210 && integer_pow2p (TREE_OPERAND (arg0, 1))
13211 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13212 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13213 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13214 integer_zero_node));
13216 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13217 bit, then fold the expression into A < 0 or A >= 0. */
13218 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13219 if (tem)
13220 return tem;
13222 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13223 Similarly for NE_EXPR. */
13224 if (TREE_CODE (arg0) == BIT_AND_EXPR
13225 && TREE_CODE (arg1) == INTEGER_CST
13226 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13228 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13229 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13230 TREE_OPERAND (arg0, 1));
13231 tree dandnotc
13232 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13233 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13234 notc);
13235 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13236 if (integer_nonzerop (dandnotc))
13237 return omit_one_operand_loc (loc, type, rslt, arg0);
13240 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13241 Similarly for NE_EXPR. */
13242 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13243 && TREE_CODE (arg1) == INTEGER_CST
13244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13246 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13247 tree candnotd
13248 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13249 TREE_OPERAND (arg0, 1),
13250 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13251 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13252 if (integer_nonzerop (candnotd))
13253 return omit_one_operand_loc (loc, type, rslt, arg0);
13256 /* If this is a comparison of a field, we may be able to simplify it. */
13257 if ((TREE_CODE (arg0) == COMPONENT_REF
13258 || TREE_CODE (arg0) == BIT_FIELD_REF)
13259 /* Handle the constant case even without -O
13260 to make sure the warnings are given. */
13261 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13263 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13264 if (t1)
13265 return t1;
13268 /* Optimize comparisons of strlen vs zero to a compare of the
13269 first character of the string vs zero. To wit,
13270 strlen(ptr) == 0 => *ptr == 0
13271 strlen(ptr) != 0 => *ptr != 0
13272 Other cases should reduce to one of these two (or a constant)
13273 due to the return value of strlen being unsigned. */
13274 if (TREE_CODE (arg0) == CALL_EXPR
13275 && integer_zerop (arg1))
13277 tree fndecl = get_callee_fndecl (arg0);
13279 if (fndecl
13280 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13281 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13282 && call_expr_nargs (arg0) == 1
13283 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13285 tree iref = build_fold_indirect_ref_loc (loc,
13286 CALL_EXPR_ARG (arg0, 0));
13287 return fold_build2_loc (loc, code, type, iref,
13288 build_int_cst (TREE_TYPE (iref), 0));
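/* For example, strlen (s) == 0 folds to *s == 0, testing the
   first character instead of calling strlen.  */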
13292 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13293 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13294 if (TREE_CODE (arg0) == RSHIFT_EXPR
13295 && integer_zerop (arg1)
13296 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13298 tree arg00 = TREE_OPERAND (arg0, 0);
13299 tree arg01 = TREE_OPERAND (arg0, 1);
13300 tree itype = TREE_TYPE (arg00);
13301 if (TREE_INT_CST_HIGH (arg01) == 0
13302 && TREE_INT_CST_LOW (arg01)
13303 == (unsigned HOST_WIDE_INT) (element_precision (itype) - 1))
13305 if (TYPE_UNSIGNED (itype))
13307 itype = signed_type_for (itype);
13308 arg00 = fold_convert_loc (loc, itype, arg00);
13310 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13311 type, arg00, build_zero_cst (itype));
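/* e.g. for 32-bit int X:
     (x >> 31) != 0  ->  x < 0
     (x >> 31) == 0  ->  x >= 0
   An unsigned X is first converted to the corresponding signed
   type so that the sign test is meaningful.  */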
13315 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13316 if (integer_zerop (arg1)
13317 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13318 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13319 TREE_OPERAND (arg0, 1));
13321 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13322 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13323 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13324 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13325 build_zero_cst (TREE_TYPE (arg0)));
13326 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13327 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13328 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13329 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13330 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13331 build_zero_cst (TREE_TYPE (arg0)));
13333 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13334 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13335 && TREE_CODE (arg1) == INTEGER_CST
13336 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13337 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13338 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13339 TREE_OPERAND (arg0, 1), arg1));
13341 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13342 (X & C) == 0 when C is a single bit. */
13343 if (TREE_CODE (arg0) == BIT_AND_EXPR
13344 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13345 && integer_zerop (arg1)
13346 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13348 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13349 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13350 TREE_OPERAND (arg0, 1));
13351 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13352 type, tem,
13353 fold_convert_loc (loc, TREE_TYPE (arg0),
13354 arg1));
13357 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13358 constant C is a power of two, i.e. a single bit. */
13359 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13361 && integer_zerop (arg1)
13362 && integer_pow2p (TREE_OPERAND (arg0, 1))
13363 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13364 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13366 tree arg00 = TREE_OPERAND (arg0, 0);
13367 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13368 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13371 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13372 when C is a power of two, i.e. a single bit. */
13373 if (TREE_CODE (arg0) == BIT_AND_EXPR
13374 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13375 && integer_zerop (arg1)
13376 && integer_pow2p (TREE_OPERAND (arg0, 1))
13377 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13378 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13380 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13381 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13382 arg000, TREE_OPERAND (arg0, 1));
13383 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13384 tem, build_int_cst (TREE_TYPE (tem), 0));
13387 if (integer_zerop (arg1)
13388 && tree_expr_nonzero_p (arg0))
13390 tree res = constant_boolean_node (code==NE_EXPR, type);
13391 return omit_one_operand_loc (loc, type, res, arg0);
13394 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13395 if (TREE_CODE (arg0) == NEGATE_EXPR
13396 && TREE_CODE (arg1) == NEGATE_EXPR)
13397 return fold_build2_loc (loc, code, type,
13398 TREE_OPERAND (arg0, 0),
13399 fold_convert_loc (loc, TREE_TYPE (arg0),
13400 TREE_OPERAND (arg1, 0)));
13402 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13403 if (TREE_CODE (arg0) == BIT_AND_EXPR
13404 && TREE_CODE (arg1) == BIT_AND_EXPR)
13406 tree arg00 = TREE_OPERAND (arg0, 0);
13407 tree arg01 = TREE_OPERAND (arg0, 1);
13408 tree arg10 = TREE_OPERAND (arg1, 0);
13409 tree arg11 = TREE_OPERAND (arg1, 1);
13410 tree itype = TREE_TYPE (arg0);
13412 if (operand_equal_p (arg01, arg11, 0))
13413 return fold_build2_loc (loc, code, type,
13414 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13415 fold_build2_loc (loc,
13416 BIT_XOR_EXPR, itype,
13417 arg00, arg10),
13418 arg01),
13419 build_zero_cst (itype));
13421 if (operand_equal_p (arg01, arg10, 0))
13422 return fold_build2_loc (loc, code, type,
13423 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13424 fold_build2_loc (loc,
13425 BIT_XOR_EXPR, itype,
13426 arg00, arg11),
13427 arg01),
13428 build_zero_cst (itype));
13430 if (operand_equal_p (arg00, arg11, 0))
13431 return fold_build2_loc (loc, code, type,
13432 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13433 fold_build2_loc (loc,
13434 BIT_XOR_EXPR, itype,
13435 arg01, arg10),
13436 arg00),
13437 build_zero_cst (itype));
13439 if (operand_equal_p (arg00, arg10, 0))
13440 return fold_build2_loc (loc, code, type,
13441 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13442 fold_build2_loc (loc,
13443 BIT_XOR_EXPR, itype,
13444 arg01, arg11),
13445 arg00),
13446 build_zero_cst (itype));
13449 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13450 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13452 tree arg00 = TREE_OPERAND (arg0, 0);
13453 tree arg01 = TREE_OPERAND (arg0, 1);
13454 tree arg10 = TREE_OPERAND (arg1, 0);
13455 tree arg11 = TREE_OPERAND (arg1, 1);
13456 tree itype = TREE_TYPE (arg0);
13458 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13459 operand_equal_p guarantees no side-effects so we don't need
13460 to use omit_one_operand on Z. */
13461 if (operand_equal_p (arg01, arg11, 0))
13462 return fold_build2_loc (loc, code, type, arg00,
13463 fold_convert_loc (loc, TREE_TYPE (arg00),
13464 arg10));
13465 if (operand_equal_p (arg01, arg10, 0))
13466 return fold_build2_loc (loc, code, type, arg00,
13467 fold_convert_loc (loc, TREE_TYPE (arg00),
13468 arg11));
13469 if (operand_equal_p (arg00, arg11, 0))
13470 return fold_build2_loc (loc, code, type, arg01,
13471 fold_convert_loc (loc, TREE_TYPE (arg01),
13472 arg10));
13473 if (operand_equal_p (arg00, arg10, 0))
13474 return fold_build2_loc (loc, code, type, arg01,
13475 fold_convert_loc (loc, TREE_TYPE (arg01),
13476 arg11));
13478 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13479 if (TREE_CODE (arg01) == INTEGER_CST
13480 && TREE_CODE (arg11) == INTEGER_CST)
13482 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13483 fold_convert_loc (loc, itype, arg11));
13484 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13485 return fold_build2_loc (loc, code, type, tem,
13486 fold_convert_loc (loc, itype, arg10));
13490 /* Attempt to simplify equality/inequality comparisons of complex
13491 values. Only lower the comparison if the result is known or
13492 can be simplified to a single scalar comparison. */
13493 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13494 || TREE_CODE (arg0) == COMPLEX_CST)
13495 && (TREE_CODE (arg1) == COMPLEX_EXPR
13496 || TREE_CODE (arg1) == COMPLEX_CST))
13498 tree real0, imag0, real1, imag1;
13499 tree rcond, icond;
13501 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13503 real0 = TREE_OPERAND (arg0, 0);
13504 imag0 = TREE_OPERAND (arg0, 1);
13506 else
13508 real0 = TREE_REALPART (arg0);
13509 imag0 = TREE_IMAGPART (arg0);
13512 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13514 real1 = TREE_OPERAND (arg1, 0);
13515 imag1 = TREE_OPERAND (arg1, 1);
13517 else
13519 real1 = TREE_REALPART (arg1);
13520 imag1 = TREE_IMAGPART (arg1);
13523 rcond = fold_binary_loc (loc, code, type, real0, real1);
13524 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13526 if (integer_zerop (rcond))
13528 if (code == EQ_EXPR)
13529 return omit_two_operands_loc (loc, type, boolean_false_node,
13530 imag0, imag1);
13531 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13533 else
13535 if (code == NE_EXPR)
13536 return omit_two_operands_loc (loc, type, boolean_true_node,
13537 imag0, imag1);
13538 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13542 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13543 if (icond && TREE_CODE (icond) == INTEGER_CST)
13545 if (integer_zerop (icond))
13547 if (code == EQ_EXPR)
13548 return omit_two_operands_loc (loc, type, boolean_false_node,
13549 real0, real1);
13550 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13552 else
13554 if (code == NE_EXPR)
13555 return omit_two_operands_loc (loc, type, boolean_true_node,
13556 real0, real1);
13557 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13562 return NULL_TREE;
13564 case LT_EXPR:
13565 case GT_EXPR:
13566 case LE_EXPR:
13567 case GE_EXPR:
13568 tem = fold_comparison (loc, code, type, op0, op1);
13569 if (tem != NULL_TREE)
13570 return tem;
13572 /* Transform comparisons of the form X +- C CMP X. */
13573 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13575 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13576 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13577 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13578 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13580 tree arg01 = TREE_OPERAND (arg0, 1);
13581 enum tree_code code0 = TREE_CODE (arg0);
13582 int is_positive;
13584 if (TREE_CODE (arg01) == REAL_CST)
13585 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13586 else
13587 is_positive = tree_int_cst_sgn (arg01);
13589 /* (X - c) > X becomes false. */
13590 if (code == GT_EXPR
13591 && ((code0 == MINUS_EXPR && is_positive >= 0)
13592 || (code0 == PLUS_EXPR && is_positive <= 0)))
13594 if (TREE_CODE (arg01) == INTEGER_CST
13595 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13596 fold_overflow_warning (("assuming signed overflow does not "
13597 "occur when assuming that (X - c) > X "
13598 "is always false"),
13599 WARN_STRICT_OVERFLOW_ALL);
13600 return constant_boolean_node (0, type);
13603 /* Likewise (X + c) < X becomes false. */
13604 if (code == LT_EXPR
13605 && ((code0 == PLUS_EXPR && is_positive >= 0)
13606 || (code0 == MINUS_EXPR && is_positive <= 0)))
13608 if (TREE_CODE (arg01) == INTEGER_CST
13609 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13610 fold_overflow_warning (("assuming signed overflow does not "
13611 "occur when assuming that "
13612 "(X + c) < X is always false"),
13613 WARN_STRICT_OVERFLOW_ALL);
13614 return constant_boolean_node (0, type);
13617 /* Convert (X - c) <= X to true. */
13618 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13619 && code == LE_EXPR
13620 && ((code0 == MINUS_EXPR && is_positive >= 0)
13621 || (code0 == PLUS_EXPR && is_positive <= 0)))
13623 if (TREE_CODE (arg01) == INTEGER_CST
13624 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13625 fold_overflow_warning (("assuming signed overflow does not "
13626 "occur when assuming that "
13627 "(X - c) <= X is always true"),
13628 WARN_STRICT_OVERFLOW_ALL);
13629 return constant_boolean_node (1, type);
13632 /* Convert (X + c) >= X to true. */
13633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13634 && code == GE_EXPR
13635 && ((code0 == PLUS_EXPR && is_positive >= 0)
13636 || (code0 == MINUS_EXPR && is_positive <= 0)))
13638 if (TREE_CODE (arg01) == INTEGER_CST
13639 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13640 fold_overflow_warning (("assuming signed overflow does not "
13641 "occur when assuming that "
13642 "(X + c) >= X is always true"),
13643 WARN_STRICT_OVERFLOW_ALL);
13644 return constant_boolean_node (1, type);
13647 if (TREE_CODE (arg01) == INTEGER_CST)
13649 /* Convert X + c > X and X - c < X to true for integers. */
13650 if (code == GT_EXPR
13651 && ((code0 == PLUS_EXPR && is_positive > 0)
13652 || (code0 == MINUS_EXPR && is_positive < 0)))
13654 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13655 fold_overflow_warning (("assuming signed overflow does "
13656 "not occur when assuming that "
13657 "(X + c) > X is always true"),
13658 WARN_STRICT_OVERFLOW_ALL);
13659 return constant_boolean_node (1, type);
13662 if (code == LT_EXPR
13663 && ((code0 == MINUS_EXPR && is_positive > 0)
13664 || (code0 == PLUS_EXPR && is_positive < 0)))
13666 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13667 fold_overflow_warning (("assuming signed overflow does "
13668 "not occur when assuming that "
13669 "(X - c) < X is always true"),
13670 WARN_STRICT_OVERFLOW_ALL);
13671 return constant_boolean_node (1, type);
13674 /* Convert X + c <= X and X - c >= X to false for integers. */
13675 if (code == LE_EXPR
13676 && ((code0 == PLUS_EXPR && is_positive > 0)
13677 || (code0 == MINUS_EXPR && is_positive < 0)))
13679 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13680 fold_overflow_warning (("assuming signed overflow does "
13681 "not occur when assuming that "
13682 "(X + c) <= X is always false"),
13683 WARN_STRICT_OVERFLOW_ALL);
13684 return constant_boolean_node (0, type);
13687 if (code == GE_EXPR
13688 && ((code0 == MINUS_EXPR && is_positive > 0)
13689 || (code0 == PLUS_EXPR && is_positive < 0)))
13691 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13692 fold_overflow_warning (("assuming signed overflow does "
13693 "not occur when assuming that "
13694 "(X - c) >= X is always false"),
13695 WARN_STRICT_OVERFLOW_ALL);
13696 return constant_boolean_node (0, type);
13701 /* Comparisons with the highest or lowest possible integer of
13702 the specified precision will have known values. */
13704 tree arg1_type = TREE_TYPE (arg1);
13705 unsigned int width = TYPE_PRECISION (arg1_type);
13707 if (TREE_CODE (arg1) == INTEGER_CST
13708 && width <= HOST_BITS_PER_DOUBLE_INT
13709 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13711 HOST_WIDE_INT signed_max_hi;
13712 unsigned HOST_WIDE_INT signed_max_lo;
13713 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13715 if (width <= HOST_BITS_PER_WIDE_INT)
13717 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13718 - 1;
13719 signed_max_hi = 0;
13720 max_hi = 0;
13722 if (TYPE_UNSIGNED (arg1_type))
13724 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13725 min_lo = 0;
13726 min_hi = 0;
13728 else
13730 max_lo = signed_max_lo;
13731 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13732 min_hi = -1;
13735 else
13737 width -= HOST_BITS_PER_WIDE_INT;
13738 signed_max_lo = -1;
13739 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13740 - 1;
13741 max_lo = -1;
13742 min_lo = 0;
13744 if (TYPE_UNSIGNED (arg1_type))
13746 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13747 min_hi = 0;
13749 else
13751 max_hi = signed_max_hi;
13752 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13756 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13757 && TREE_INT_CST_LOW (arg1) == max_lo)
13758 switch (code)
13760 case GT_EXPR:
13761 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13763 case GE_EXPR:
13764 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13766 case LE_EXPR:
13767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13769 case LT_EXPR:
13770 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13772 /* The GE_EXPR and LT_EXPR cases above are not normally
13773 reached because of previous transformations. */
13775 default:
13776 break;
13778 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13779 == max_hi
13780 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13781 switch (code)
13783 case GT_EXPR:
13784 arg1 = const_binop (PLUS_EXPR, arg1,
13785 build_int_cst (TREE_TYPE (arg1), 1));
13786 return fold_build2_loc (loc, EQ_EXPR, type,
13787 fold_convert_loc (loc,
13788 TREE_TYPE (arg1), arg0),
13789 arg1);
13790 case LE_EXPR:
13791 arg1 = const_binop (PLUS_EXPR, arg1,
13792 build_int_cst (TREE_TYPE (arg1), 1));
13793 return fold_build2_loc (loc, NE_EXPR, type,
13794 fold_convert_loc (loc, TREE_TYPE (arg1),
13795 arg0),
13796 arg1);
13797 default:
13798 break;
13800 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13801 == min_hi
13802 && TREE_INT_CST_LOW (arg1) == min_lo)
13803 switch (code)
13805 case LT_EXPR:
13806 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13808 case LE_EXPR:
13809 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13811 case GE_EXPR:
13812 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13814 case GT_EXPR:
13815 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13817 default:
13818 break;
13820 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13821 == min_hi
13822 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13823 switch (code)
13825 case GE_EXPR:
13826 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13827 return fold_build2_loc (loc, NE_EXPR, type,
13828 fold_convert_loc (loc,
13829 TREE_TYPE (arg1), arg0),
13830 arg1);
13831 case LT_EXPR:
13832 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13833 return fold_build2_loc (loc, EQ_EXPR, type,
13834 fold_convert_loc (loc, TREE_TYPE (arg1),
13835 arg0),
13836 arg1);
13837 default:
13838 break;
13841 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13842 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13843 && TYPE_UNSIGNED (arg1_type)
13844 /* We will flip the signedness of the comparison operator
13845 associated with the mode of arg1, so the sign bit is
13846 specified by this mode. Check that arg1 is the signed
13847 max associated with this sign bit. */
13848 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13849 /* signed_type does not work on pointer types. */
13850 && INTEGRAL_TYPE_P (arg1_type))
13852 /* The following case also applies to X < signed_max+1
13853 and X >= signed_max+1 because of previous transformations. */
13854 if (code == LE_EXPR || code == GT_EXPR)
13856 tree st = signed_type_for (arg1_type);
13857 return fold_build2_loc (loc,
13858 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13859 type, fold_convert_loc (loc, st, arg0),
13860 build_int_cst (st, 0));
13866 /* If we are comparing an ABS_EXPR with a constant, we can
13867 convert all the cases into explicit comparisons, but they may
13868 well not be faster than doing the ABS and one comparison.
13869 But ABS (X) <= C is a range comparison, which becomes a subtraction
13870 and a comparison, and is probably faster. */
13871 if (code == LE_EXPR
13872 && TREE_CODE (arg1) == INTEGER_CST
13873 && TREE_CODE (arg0) == ABS_EXPR
13874 && ! TREE_SIDE_EFFECTS (arg0)
13875 && (0 != (tem = negate_expr (arg1)))
13876 && TREE_CODE (tem) == INTEGER_CST
13877 && !TREE_OVERFLOW (tem))
13878 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13879 build2 (GE_EXPR, type,
13880 TREE_OPERAND (arg0, 0), tem),
13881 build2 (LE_EXPR, type,
13882 TREE_OPERAND (arg0, 0), arg1));
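/* E.g. "ABS (x) <= 5" folds to "x >= -5 && x <= 5" here; the range
   machinery can later collapse that pair into a single unsigned
   comparison, roughly "(unsigned) (x + 5) <= 10" (illustrative sketch
   of the follow-on fold).  */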
13884 /* Convert ABS_EXPR<x> >= 0 to true. */
13885 strict_overflow_p = false;
13886 if (code == GE_EXPR
13887 && (integer_zerop (arg1)
13888 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13889 && real_zerop (arg1)))
13890 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13892 if (strict_overflow_p)
13893 fold_overflow_warning (("assuming signed overflow does not occur "
13894 "when simplifying comparison of "
13895 "absolute value and zero"),
13896 WARN_STRICT_OVERFLOW_CONDITIONAL);
13897 return omit_one_operand_loc (loc, type,
13898 constant_boolean_node (true, type),
13899 arg0);
13902 /* Convert ABS_EXPR<x> < 0 to false. */
13903 strict_overflow_p = false;
13904 if (code == LT_EXPR
13905 && (integer_zerop (arg1) || real_zerop (arg1))
13906 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13908 if (strict_overflow_p)
13909 fold_overflow_warning (("assuming signed overflow does not occur "
13910 "when simplifying comparison of "
13911 "absolute value and zero"),
13912 WARN_STRICT_OVERFLOW_CONDITIONAL);
13913 return omit_one_operand_loc (loc, type,
13914 constant_boolean_node (false, type),
13915 arg0);
13918 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13919 and similarly for >= into !=. */
13920 if ((code == LT_EXPR || code == GE_EXPR)
13921 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13922 && TREE_CODE (arg1) == LSHIFT_EXPR
13923 && integer_onep (TREE_OPERAND (arg1, 0)))
13924 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13925 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13926 TREE_OPERAND (arg1, 1)),
13927 build_zero_cst (TREE_TYPE (arg0)));
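/* E.g. for unsigned x, "x < (1 << y)" holds iff no bit at position y
   or above is set, i.e. "(x >> y) == 0"; the ">=" form folds to
   "(x >> y) != 0" the same way (illustrative).  */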
13929 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13930 otherwise Y might be >= # of bits in X's type and thus e.g.
13931 (unsigned char) (1 << Y) for Y == 15 might be 0.
13932 If the cast is widening, then 1 << Y should have unsigned type,
13933 otherwise if Y is the number of bits in the signed shift type minus 1,
13934 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13935 Y == 31 might be 0xffffffff80000000. */
13936 if ((code == LT_EXPR || code == GE_EXPR)
13937 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13938 && CONVERT_EXPR_P (arg1)
13939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13940 && (TYPE_PRECISION (TREE_TYPE (arg1))
13941 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13942 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13943 || (TYPE_PRECISION (TREE_TYPE (arg1))
13944 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13945 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13947 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13948 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13949 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13950 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13951 build_zero_cst (TREE_TYPE (arg0)));
13954 return NULL_TREE;
13956 case UNORDERED_EXPR:
13957 case ORDERED_EXPR:
13958 case UNLT_EXPR:
13959 case UNLE_EXPR:
13960 case UNGT_EXPR:
13961 case UNGE_EXPR:
13962 case UNEQ_EXPR:
13963 case LTGT_EXPR:
13964 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13966 t1 = fold_relational_const (code, type, arg0, arg1);
13967 if (t1 != NULL_TREE)
13968 return t1;
13971 /* If the first operand is NaN, the result is constant. */
13972 if (TREE_CODE (arg0) == REAL_CST
13973 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13974 && (code != LTGT_EXPR || ! flag_trapping_math))
13976 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13977 ? integer_zero_node
13978 : integer_one_node;
13979 return omit_one_operand_loc (loc, type, t1, arg1);
13982 /* If the second operand is NaN, the result is constant. */
13983 if (TREE_CODE (arg1) == REAL_CST
13984 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13985 && (code != LTGT_EXPR || ! flag_trapping_math))
13987 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13988 ? integer_zero_node
13989 : integer_one_node;
13990 return omit_one_operand_loc (loc, type, t1, arg0);
13993 /* Simplify unordered comparison of something with itself. */
13994 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13995 && operand_equal_p (arg0, arg1, 0))
13996 return constant_boolean_node (1, type);
13998 if (code == LTGT_EXPR
13999 && !flag_trapping_math
14000 && operand_equal_p (arg0, arg1, 0))
14001 return constant_boolean_node (0, type);
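/* Rationale (illustrative): x UNLE x, x UNGE x and x UNEQ x are true
   even when x is NaN, because NaN compares unordered to itself;
   x LTGT x is false, but folding it away is only safe when
   -ftrapping-math does not require the potential invalid-operation
   exception to be preserved.  */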
14003 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
14005 tree targ0 = strip_float_extensions (arg0);
14006 tree targ1 = strip_float_extensions (arg1);
14007 tree newtype = TREE_TYPE (targ0);
14009 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14010 newtype = TREE_TYPE (targ1);
14012 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14013 return fold_build2_loc (loc, code, type,
14014 fold_convert_loc (loc, newtype, targ0),
14015 fold_convert_loc (loc, newtype, targ1));
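/* E.g. with float f1, f2, "(double) f1 < (double) f2" compares
   identically in float, since widening to double is exact and
   order-preserving, so both conversions are dropped (illustrative).  */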
14018 return NULL_TREE;
14020 case COMPOUND_EXPR:
14021 /* When pedantic, a compound expression can be neither an lvalue
14022 nor an integer constant expression. */
14023 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14024 return NULL_TREE;
14025 /* Don't let (0, 0) be a null pointer constant. */
14026 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14027 : fold_convert_loc (loc, type, arg1);
14028 return pedantic_non_lvalue_loc (loc, tem);
14030 case COMPLEX_EXPR:
14031 if ((TREE_CODE (arg0) == REAL_CST
14032 && TREE_CODE (arg1) == REAL_CST)
14033 || (TREE_CODE (arg0) == INTEGER_CST
14034 && TREE_CODE (arg1) == INTEGER_CST))
14035 return build_complex (type, arg0, arg1);
14036 if (TREE_CODE (arg0) == REALPART_EXPR
14037 && TREE_CODE (arg1) == IMAGPART_EXPR
14038 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14039 && operand_equal_p (TREE_OPERAND (arg0, 0),
14040 TREE_OPERAND (arg1, 0), 0))
14041 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14042 TREE_OPERAND (arg1, 0));
14043 return NULL_TREE;
14045 case ASSERT_EXPR:
14046 /* An ASSERT_EXPR should never be passed to fold_binary. */
14047 gcc_unreachable ();
14049 case VEC_PACK_TRUNC_EXPR:
14050 case VEC_PACK_FIX_TRUNC_EXPR:
14052 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14053 tree *elts;
14055 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14056 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14057 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14058 return NULL_TREE;
14060 elts = XALLOCAVEC (tree, nelts);
14061 if (!vec_cst_ctor_to_array (arg0, elts)
14062 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14063 return NULL_TREE;
14065 for (i = 0; i < nelts; i++)
14067 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14068 ? NOP_EXPR : FIX_TRUNC_EXPR,
14069 TREE_TYPE (type), elts[i]);
14070 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14071 return NULL_TREE;
14074 return build_vector (type, elts);
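/* The case above, e.g., packs two V2SI constants { a, b } and
   { c, d } into the V4HI constant { a, b, c, d }, each element
   narrowed by fold_convert_const (illustrative lane layout).  */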
14077 case VEC_WIDEN_MULT_LO_EXPR:
14078 case VEC_WIDEN_MULT_HI_EXPR:
14079 case VEC_WIDEN_MULT_EVEN_EXPR:
14080 case VEC_WIDEN_MULT_ODD_EXPR:
14082 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14083 unsigned int out, ofs, scale;
14084 tree *elts;
14086 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14087 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14088 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14089 return NULL_TREE;
14091 elts = XALLOCAVEC (tree, nelts * 4);
14092 if (!vec_cst_ctor_to_array (arg0, elts)
14093 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14094 return NULL_TREE;
14096 if (code == VEC_WIDEN_MULT_LO_EXPR)
14097 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14098 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14099 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14100 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14101 scale = 1, ofs = 0;
14102 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14103 scale = 1, ofs = 1;
14105 for (out = 0; out < nelts; out++)
14107 unsigned int in1 = (out << scale) + ofs;
14108 unsigned int in2 = in1 + nelts * 2;
14109 tree t1, t2;
14111 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14112 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14114 if (t1 == NULL_TREE || t2 == NULL_TREE)
14115 return NULL_TREE;
14116 elts[out] = const_binop (MULT_EXPR, t1, t2);
14117 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14118 return NULL_TREE;
14121 return build_vector (type, elts);
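/* E.g. VEC_WIDEN_MULT_EVEN_EXPR on V4HI constants { a, b, c, d } and
   { e, f, g, h } yields the V2SI constant { a*e, c*g }: scale == 1
   with ofs == 0 walks the even input lanes, widening each factor
   before the multiply (illustrative).  */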
14124 default:
14125 return NULL_TREE;
14126 } /* switch (code) */
14129 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14130 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14131 of GOTO_EXPR. */
14133 static tree
14134 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14136 switch (TREE_CODE (*tp))
14138 case LABEL_EXPR:
14139 return *tp;
14141 case GOTO_EXPR:
14142 *walk_subtrees = 0;
14144 /* ... fall through ... */
14146 default:
14147 return NULL_TREE;
14151 /* Return whether the sub-tree ST contains a label which is accessible from
14152 outside the sub-tree. */
14154 static bool
14155 contains_label_p (tree st)
14157 return
14158 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
14161 /* Fold a ternary expression of code CODE and type TYPE with operands
14162 OP0, OP1, and OP2. Return the folded expression if folding is
14163 successful. Otherwise, return NULL_TREE. */
14165 tree
14166 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14167 tree op0, tree op1, tree op2)
14169 tree tem;
14170 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14171 enum tree_code_class kind = TREE_CODE_CLASS (code);
14173 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14174 && TREE_CODE_LENGTH (code) == 3);
14176 /* Strip any conversions that don't change the mode. This is safe
14177 for every expression, except for a comparison expression because
14178 its signedness is derived from its operands. So, in the latter
14179 case, only strip conversions that don't change the signedness.
14181 Note that this is done as an internal manipulation within the
14182 constant folder, in order to find the simplest representation of
14183 the arguments so that their form can be studied. In any case,
14184 the appropriate type conversions should be put back in the tree
14185 that will get out of the constant folder. */
14186 if (op0)
14188 arg0 = op0;
14189 STRIP_NOPS (arg0);
14192 if (op1)
14194 arg1 = op1;
14195 STRIP_NOPS (arg1);
14198 if (op2)
14200 arg2 = op2;
14201 STRIP_NOPS (arg2);
14204 switch (code)
14206 case COMPONENT_REF:
14207 if (TREE_CODE (arg0) == CONSTRUCTOR
14208 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14210 unsigned HOST_WIDE_INT idx;
14211 tree field, value;
14212 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14213 if (field == arg1)
14214 return value;
14216 return NULL_TREE;
14218 case COND_EXPR:
14219 case VEC_COND_EXPR:
14220 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14221 so all simple results must be passed through pedantic_non_lvalue. */
14222 if (TREE_CODE (arg0) == INTEGER_CST)
14224 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14225 tem = integer_zerop (arg0) ? op2 : op1;
14226 /* Only optimize constant conditions when the selected branch
14227 has the same type as the COND_EXPR. This avoids optimizing
14228 away "c ? x : throw", where the throw has a void type.
14229 Avoid throwing away an operand that contains a label. */
14230 if ((!TREE_SIDE_EFFECTS (unused_op)
14231 || !contains_label_p (unused_op))
14232 && (! VOID_TYPE_P (TREE_TYPE (tem))
14233 || VOID_TYPE_P (type)))
14234 return pedantic_non_lvalue_loc (loc, tem);
14235 return NULL_TREE;
14237 else if (TREE_CODE (arg0) == VECTOR_CST)
14239 if (integer_all_onesp (arg0))
14240 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14241 if (integer_zerop (arg0))
14242 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14244 if ((TREE_CODE (arg1) == VECTOR_CST
14245 || TREE_CODE (arg1) == CONSTRUCTOR)
14246 && (TREE_CODE (arg2) == VECTOR_CST
14247 || TREE_CODE (arg2) == CONSTRUCTOR))
14249 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14250 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14251 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14252 for (i = 0; i < nelts; i++)
14254 tree val = VECTOR_CST_ELT (arg0, i);
14255 if (integer_all_onesp (val))
14256 sel[i] = i;
14257 else if (integer_zerop (val))
14258 sel[i] = nelts + i;
14259 else /* Currently unreachable. */
14260 return NULL_TREE;
14262 tree t = fold_vec_perm (type, arg1, arg2, sel);
14263 if (t != NULL_TREE)
14264 return t;
14268 if (operand_equal_p (arg1, op2, 0))
14269 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14271 /* If we have A op B ? A : C, we may be able to convert this to a
14272 simpler expression, depending on the operation and the values
14273 of B and C. Signed zeros prevent all of these transformations,
14274 for reasons given above each one.
14276 Also try swapping the arguments and inverting the conditional. */
14277 if (COMPARISON_CLASS_P (arg0)
14278 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14279 arg1, TREE_OPERAND (arg0, 1))
14280 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14282 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14283 if (tem)
14284 return tem;
14287 if (COMPARISON_CLASS_P (arg0)
14288 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14289 op2,
14290 TREE_OPERAND (arg0, 1))
14291 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14293 location_t loc0 = expr_location_or (arg0, loc);
14294 tem = fold_invert_truthvalue (loc0, arg0);
14295 if (tem && COMPARISON_CLASS_P (tem))
14297 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14298 if (tem)
14299 return tem;
14303 /* If the second operand is simpler than the third, swap them
14304 since that produces better jump optimization results. */
14305 if (truth_value_p (TREE_CODE (arg0))
14306 && tree_swap_operands_p (op1, op2, false))
14308 location_t loc0 = expr_location_or (arg0, loc);
14309 /* See if this can be inverted. If it can't, possibly because
14310 it was a floating-point inequality comparison, don't do
14311 anything. */
14312 tem = fold_invert_truthvalue (loc0, arg0);
14313 if (tem)
14314 return fold_build3_loc (loc, code, type, tem, op2, op1);
14317 /* Convert A ? 1 : 0 to simply A. */
14318 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14319 : (integer_onep (op1)
14320 && !VECTOR_TYPE_P (type)))
14321 && integer_zerop (op2)
14322 /* If we try to convert OP0 to our type, the
14323 call to fold will try to move the conversion inside
14324 a COND, which will recurse. In that case, the COND_EXPR
14325 is probably the best choice, so leave it alone. */
14326 && type == TREE_TYPE (arg0))
14327 return pedantic_non_lvalue_loc (loc, arg0);
14329 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14330 over COND_EXPR in cases such as floating point comparisons. */
14331 if (integer_zerop (op1)
14332 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14333 : (integer_onep (op2)
14334 && !VECTOR_TYPE_P (type)))
14335 && truth_value_p (TREE_CODE (arg0)))
14336 return pedantic_non_lvalue_loc (loc,
14337 fold_convert_loc (loc, type,
14338 invert_truthvalue_loc (loc,
14339 arg0)));
14341 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14342 if (TREE_CODE (arg0) == LT_EXPR
14343 && integer_zerop (TREE_OPERAND (arg0, 1))
14344 && integer_zerop (op2)
14345 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14347 /* sign_bit_p looks through both zero and sign extensions,
14348 but for this optimization only sign extensions are
14349 usable. */
14350 tree tem2 = TREE_OPERAND (arg0, 0);
14351 while (tem != tem2)
14353 if (TREE_CODE (tem2) != NOP_EXPR
14354 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14356 tem = NULL_TREE;
14357 break;
14359 tem2 = TREE_OPERAND (tem2, 0);
14361 /* sign_bit_p only checks ARG1 bits within A's precision.
14362 If <sign bit of A> has wider type than A, bits outside
14363 of A's precision in <sign bit of A> need to be checked.
14364 If they are all 0, this optimization needs to be done
14365 in unsigned A's type; if they are all 1, in signed A's
14366 type; otherwise this can't be done. */
14367 if (tem
14368 && TYPE_PRECISION (TREE_TYPE (tem))
14369 < TYPE_PRECISION (TREE_TYPE (arg1))
14370 && TYPE_PRECISION (TREE_TYPE (tem))
14371 < TYPE_PRECISION (type))
14373 unsigned HOST_WIDE_INT mask_lo;
14374 HOST_WIDE_INT mask_hi;
14375 int inner_width, outer_width;
14376 tree tem_type;
14378 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14379 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14380 if (outer_width > TYPE_PRECISION (type))
14381 outer_width = TYPE_PRECISION (type);
14383 if (outer_width > HOST_BITS_PER_WIDE_INT)
14385 mask_hi = (HOST_WIDE_INT_M1U
14386 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14387 mask_lo = -1;
14389 else
14391 mask_hi = 0;
14392 mask_lo = (HOST_WIDE_INT_M1U
14393 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14395 if (inner_width > HOST_BITS_PER_WIDE_INT)
14397 mask_hi &= ~(HOST_WIDE_INT_M1U
14398 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14399 mask_lo = 0;
14401 else
14402 mask_lo &= ~(HOST_WIDE_INT_M1U
14403 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14405 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14406 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14408 tem_type = signed_type_for (TREE_TYPE (tem));
14409 tem = fold_convert_loc (loc, tem_type, tem);
14411 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14412 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14414 tem_type = unsigned_type_for (TREE_TYPE (tem));
14415 tem = fold_convert_loc (loc, tem_type, tem);
14417 else
14418 tem = NULL;
14421 if (tem)
14422 return
14423 fold_convert_loc (loc, type,
14424 fold_build2_loc (loc, BIT_AND_EXPR,
14425 TREE_TYPE (tem), tem,
14426 fold_convert_loc (loc,
14427 TREE_TYPE (tem),
14428 arg1)));
14431 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14432 already handled above. */
14433 if (TREE_CODE (arg0) == BIT_AND_EXPR
14434 && integer_onep (TREE_OPERAND (arg0, 1))
14435 && integer_zerop (op2)
14436 && integer_pow2p (arg1))
14438 tree tem = TREE_OPERAND (arg0, 0);
14439 STRIP_NOPS (tem);
14440 if (TREE_CODE (tem) == RSHIFT_EXPR
14441 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14442 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14443 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14444 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14445 TREE_OPERAND (tem, 0), arg1);
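/* E.g. "(a >> 3) & 1 ? 8 : 0" selects bit 3 of a and rescales it to
   8 == 1 << 3, so it is simply "a & 8" (illustrative).  */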
14448 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14449 is probably obsolete because the first operand should be a
14450 truth value (that's why we have the two cases above), but let's
14451 leave it in until we can confirm this for all front-ends. */
14452 if (integer_zerop (op2)
14453 && TREE_CODE (arg0) == NE_EXPR
14454 && integer_zerop (TREE_OPERAND (arg0, 1))
14455 && integer_pow2p (arg1)
14456 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14458 arg1, OEP_ONLY_CONST))
14459 return pedantic_non_lvalue_loc (loc,
14460 fold_convert_loc (loc, type,
14461 TREE_OPERAND (arg0, 0)));
14463 /* Disable the transformations below for vectors, since
14464 fold_binary_op_with_conditional_arg may undo them immediately,
14465 yielding an infinite loop. */
14466 if (code == VEC_COND_EXPR)
14467 return NULL_TREE;
14469 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14470 if (integer_zerop (op2)
14471 && truth_value_p (TREE_CODE (arg0))
14472 && truth_value_p (TREE_CODE (arg1))
14473 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14474 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14475 : TRUTH_ANDIF_EXPR,
14476 type, fold_convert_loc (loc, type, arg0), arg1);
14478 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14479 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14480 && truth_value_p (TREE_CODE (arg0))
14481 && truth_value_p (TREE_CODE (arg1))
14482 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14484 location_t loc0 = expr_location_or (arg0, loc);
14485 /* Only perform transformation if ARG0 is easily inverted. */
14486 tem = fold_invert_truthvalue (loc0, arg0);
14487 if (tem)
14488 return fold_build2_loc (loc, code == VEC_COND_EXPR
14489 ? BIT_IOR_EXPR
14490 : TRUTH_ORIF_EXPR,
14491 type, fold_convert_loc (loc, type, tem),
14492 arg1);
14495 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14496 if (integer_zerop (arg1)
14497 && truth_value_p (TREE_CODE (arg0))
14498 && truth_value_p (TREE_CODE (op2))
14499 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14501 location_t loc0 = expr_location_or (arg0, loc);
14502 /* Only perform transformation if ARG0 is easily inverted. */
14503 tem = fold_invert_truthvalue (loc0, arg0);
14504 if (tem)
14505 return fold_build2_loc (loc, code == VEC_COND_EXPR
14506 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14507 type, fold_convert_loc (loc, type, tem),
14508 op2);
14511 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14512 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14513 && truth_value_p (TREE_CODE (arg0))
14514 && truth_value_p (TREE_CODE (op2))
14515 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14516 return fold_build2_loc (loc, code == VEC_COND_EXPR
14517 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14518 type, fold_convert_loc (loc, type, arg0), op2);
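/* For VEC_COND_EXPR the bitwise codes BIT_AND_EXPR and BIT_IOR_EXPR
   stand in for the short-circuiting TRUTH_* codes throughout this
   block, since vector truth values are per-lane all-ones / all-zeros
   masks.  */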
14520 return NULL_TREE;
14522 case CALL_EXPR:
14523 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14524 of fold_ternary on them. */
14525 gcc_unreachable ();
14527 case BIT_FIELD_REF:
14528 if ((TREE_CODE (arg0) == VECTOR_CST
14529 || (TREE_CODE (arg0) == CONSTRUCTOR
14530 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14531 && (type == TREE_TYPE (TREE_TYPE (arg0))
14532 || (TREE_CODE (type) == VECTOR_TYPE
14533 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14535 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14536 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14537 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14538 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14540 if (n != 0
14541 && (idx % width) == 0
14542 && (n % width) == 0
14543 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14545 idx = idx / width;
14546 n = n / width;
14548 if (TREE_CODE (arg0) == VECTOR_CST)
14550 if (n == 1)
14551 return VECTOR_CST_ELT (arg0, idx);
14553 tree *vals = XALLOCAVEC (tree, n);
14554 for (unsigned i = 0; i < n; ++i)
14555 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14556 return build_vector (type, vals);
14559 /* Constructor elements can be subvectors. */
14560 unsigned HOST_WIDE_INT k = 1;
14561 if (CONSTRUCTOR_NELTS (arg0) != 0)
14563 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14564 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14565 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14568 /* We keep an exact subset of the constructor elements. */
14569 if ((idx % k) == 0 && (n % k) == 0)
14571 if (CONSTRUCTOR_NELTS (arg0) == 0)
14572 return build_constructor (type, NULL);
14573 idx /= k;
14574 n /= k;
14575 if (n == 1)
14577 if (idx < CONSTRUCTOR_NELTS (arg0))
14578 return CONSTRUCTOR_ELT (arg0, idx)->value;
14579 return build_zero_cst (type);
14582 vec<constructor_elt, va_gc> *vals;
14583 vec_alloc (vals, n);
14584 for (unsigned i = 0;
14585 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14586 ++i)
14587 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14588 CONSTRUCTOR_ELT
14589 (arg0, idx + i)->value);
14590 return build_constructor (type, vals);
14592 /* The bitfield references a single constructor element. */
14593 else if (idx + n <= (idx / k + 1) * k)
14595 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14596 return build_zero_cst (type);
14597 else if (n == k)
14598 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14599 else
14600 return fold_build3_loc (loc, code, type,
14601 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14602 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14607 /* A bit-field-ref that referenced the full argument can be stripped. */
14608 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14609 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14610 && integer_zerop (op2))
14611 return fold_convert_loc (loc, type, arg0);
14613 /* On constants we can use native encode/interpret to constant
14614 fold (nearly) all BIT_FIELD_REFs. */
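/* E.g. a BIT_FIELD_REF of bits [32, 64) of a 16-byte vector constant
   can be folded by serializing the constant into a byte buffer and
   re-reading 4 bytes at offset 4 in the requested type (illustrative;
   the code below performs exactly this encode/interpret round
   trip).  */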
14615 if (CONSTANT_CLASS_P (arg0)
14616 && can_native_interpret_type_p (type)
14617 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14618 /* This limitation should not be necessary; we just need to
14619 round this up to the mode size. */
14620 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14621 /* Need bit-shifting of the buffer to relax the following. */
14622 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14624 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14625 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14626 unsigned HOST_WIDE_INT clen;
14627 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14628 /* ??? We cannot tell native_encode_expr to start at
14629 some random byte only. So limit us to a reasonable amount
14630 of work. */
14631 if (clen <= 4096)
14633 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14634 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14635 if (len > 0
14636 && len * BITS_PER_UNIT >= bitpos + bitsize)
14638 tree v = native_interpret_expr (type,
14639 b + bitpos / BITS_PER_UNIT,
14640 bitsize / BITS_PER_UNIT);
14641 if (v)
14642 return v;
14647 return NULL_TREE;
14649 case FMA_EXPR:
14650 /* For integers we can decompose the FMA if possible. */
14651 if (TREE_CODE (arg0) == INTEGER_CST
14652 && TREE_CODE (arg1) == INTEGER_CST)
14653 return fold_build2_loc (loc, PLUS_EXPR, type,
14654 const_binop (MULT_EXPR, arg0, arg1), arg2);
14655 if (integer_zerop (arg2))
14656 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14658 return fold_fma (loc, type, arg0, arg1, arg2);
14660 case VEC_PERM_EXPR:
14661 if (TREE_CODE (arg2) == VECTOR_CST)
14663 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14664 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14665 tree t;
14666 bool need_mask_canon = false;
14667 bool all_in_vec0 = true;
14668 bool all_in_vec1 = true;
14669 bool maybe_identity = true;
14670 bool single_arg = (op0 == op1);
14671 bool changed = false;
14673 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
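/* VEC_PERM_EXPR selector entries count modulo the combined lane
   count, hence the masking: with nelts == 4 and two distinct inputs,
   mask == 7 and a selector entry of 9 picks the same lane as 1, the
   second lane of the first input (illustrative).  */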
14674 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14675 for (i = 0; i < nelts; i++)
14677 tree val = VECTOR_CST_ELT (arg2, i);
14678 if (TREE_CODE (val) != INTEGER_CST)
14679 return NULL_TREE;
14681 sel[i] = TREE_INT_CST_LOW (val) & mask;
14682 if (TREE_INT_CST_HIGH (val)
14683 || ((unsigned HOST_WIDE_INT)
14684 TREE_INT_CST_LOW (val) != sel[i]))
14685 need_mask_canon = true;
14687 if (sel[i] < nelts)
14688 all_in_vec1 = false;
14689 else
14690 all_in_vec0 = false;
14692 if ((sel[i] & (nelts-1)) != i)
14693 maybe_identity = false;
14696 if (maybe_identity)
14698 if (all_in_vec0)
14699 return op0;
14700 if (all_in_vec1)
14701 return op1;
14704 if (all_in_vec0)
14705 op1 = op0;
14706 else if (all_in_vec1)
14708 op0 = op1;
14709 for (i = 0; i < nelts; i++)
14710 sel[i] -= nelts;
14711 need_mask_canon = true;
14714 if ((TREE_CODE (op0) == VECTOR_CST
14715 || TREE_CODE (op0) == CONSTRUCTOR)
14716 && (TREE_CODE (op1) == VECTOR_CST
14717 || TREE_CODE (op1) == CONSTRUCTOR))
14719 t = fold_vec_perm (type, op0, op1, sel);
14720 if (t != NULL_TREE)
14721 return t;
14724 if (op0 == op1 && !single_arg)
14725 changed = true;
14727 if (need_mask_canon && arg2 == op2)
14729 tree *tsel = XALLOCAVEC (tree, nelts);
14730 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14731 for (i = 0; i < nelts; i++)
14732 tsel[i] = build_int_cst (eltype, sel[i]);
14733 op2 = build_vector (TREE_TYPE (arg2), tsel);
14734 changed = true;
14737 if (changed)
14738 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14740 return NULL_TREE;
14742 default:
14743 return NULL_TREE;
14744 } /* switch (code) */
14747 /* Perform constant folding and related simplification of EXPR.
14748 The related simplifications include x*1 => x, x*0 => 0, etc.,
14749 and application of the associative law.
14750 NOP_EXPR conversions may be removed freely (as long as we
14751 are careful not to change the type of the overall expression).
14752 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14753 but we can constant-fold them if they have constant operands. */
14755 #ifdef ENABLE_FOLD_CHECKING
14756 # define fold(x) fold_1 (x)
14757 static tree fold_1 (tree);
14758 static
14759 #endif
14760 tree
14761 fold (tree expr)
14763 const tree t = expr;
14764 enum tree_code code = TREE_CODE (t);
14765 enum tree_code_class kind = TREE_CODE_CLASS (code);
14766 tree tem;
14767 location_t loc = EXPR_LOCATION (expr);
14769 /* Return right away if a constant. */
14770 if (kind == tcc_constant)
14771 return t;
14773 /* CALL_EXPR-like objects with variable numbers of operands are
14774 treated specially. */
14775 if (kind == tcc_vl_exp)
14777 if (code == CALL_EXPR)
14779 tem = fold_call_expr (loc, expr, false);
14780 return tem ? tem : expr;
14782 return expr;
14785 if (IS_EXPR_CODE_CLASS (kind))
14787 tree type = TREE_TYPE (t);
14788 tree op0, op1, op2;
14790 switch (TREE_CODE_LENGTH (code))
14792 case 1:
14793 op0 = TREE_OPERAND (t, 0);
14794 tem = fold_unary_loc (loc, code, type, op0);
14795 return tem ? tem : expr;
14796 case 2:
14797 op0 = TREE_OPERAND (t, 0);
14798 op1 = TREE_OPERAND (t, 1);
14799 tem = fold_binary_loc (loc, code, type, op0, op1);
14800 return tem ? tem : expr;
14801 case 3:
14802 op0 = TREE_OPERAND (t, 0);
14803 op1 = TREE_OPERAND (t, 1);
14804 op2 = TREE_OPERAND (t, 2);
14805 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14806 return tem ? tem : expr;
14807 default:
14808 break;
14812 switch (code)
14814 case ARRAY_REF:
14816 tree op0 = TREE_OPERAND (t, 0);
14817 tree op1 = TREE_OPERAND (t, 1);
14819 if (TREE_CODE (op1) == INTEGER_CST
14820 && TREE_CODE (op0) == CONSTRUCTOR
14821 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14823 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14824 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14825 unsigned HOST_WIDE_INT begin = 0;
14827 /* Find a matching index by means of a binary search. */
14828 while (begin != end)
14830 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14831 tree index = (*elts)[middle].index;
14833 if (TREE_CODE (index) == INTEGER_CST
14834 && tree_int_cst_lt (index, op1))
14835 begin = middle + 1;
14836 else if (TREE_CODE (index) == INTEGER_CST
14837 && tree_int_cst_lt (op1, index))
14838 end = middle;
14839 else if (TREE_CODE (index) == RANGE_EXPR
14840 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14841 begin = middle + 1;
14842 else if (TREE_CODE (index) == RANGE_EXPR
14843 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14844 end = middle;
14845 else
14846 return (*elts)[middle].value;
14850 return t;
14853 /* Return a VECTOR_CST if possible. */
14854 case CONSTRUCTOR:
14856 tree type = TREE_TYPE (t);
14857 if (TREE_CODE (type) != VECTOR_TYPE)
14858 return t;
14860 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14861 unsigned HOST_WIDE_INT idx, pos = 0;
14862 tree value;
14864 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14866 if (!CONSTANT_CLASS_P (value))
14867 return t;
14868 if (TREE_CODE (value) == VECTOR_CST)
14870 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14871 vec[pos++] = VECTOR_CST_ELT (value, i);
14873 else
14874 vec[pos++] = value;
14876 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14877 vec[pos] = build_zero_cst (TREE_TYPE (type));
14879 return build_vector (type, vec);
14882 case CONST_DECL:
14883 return fold (DECL_INITIAL (t));
14885 default:
14886 return t;
14887 } /* switch (code) */
14890 #ifdef ENABLE_FOLD_CHECKING
14891 #undef fold
14893 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14894 hash_table <pointer_hash <tree_node> >);
14895 static void fold_check_failed (const_tree, const_tree);
14896 void print_fold_checksum (const_tree);
14898 /* When --enable-checking=fold, compute a digest of expr before
14899 and after the actual fold call to verify that fold did not
14900 accidentally change the original expr. */
14902 tree
14903 fold (tree expr)
14905 tree ret;
14906 struct md5_ctx ctx;
14907 unsigned char checksum_before[16], checksum_after[16];
14908 hash_table <pointer_hash <tree_node> > ht;
14910 ht.create (32);
14911 md5_init_ctx (&ctx);
14912 fold_checksum_tree (expr, &ctx, ht);
14913 md5_finish_ctx (&ctx, checksum_before);
14914 ht.empty ();
14916 ret = fold_1 (expr);
14918 md5_init_ctx (&ctx);
14919 fold_checksum_tree (expr, &ctx, ht);
14920 md5_finish_ctx (&ctx, checksum_after);
14921 ht.dispose ();
14923 if (memcmp (checksum_before, checksum_after, 16))
14924 fold_check_failed (expr, ret);
14926 return ret;
14929 void
14930 print_fold_checksum (const_tree expr)
14932 struct md5_ctx ctx;
14933 unsigned char checksum[16], cnt;
14934 hash_table <pointer_hash <tree_node> > ht;
14936 ht.create (32);
14937 md5_init_ctx (&ctx);
14938 fold_checksum_tree (expr, &ctx, ht);
14939 md5_finish_ctx (&ctx, checksum);
14940 ht.dispose ();
14941 for (cnt = 0; cnt < 16; ++cnt)
14942 fprintf (stderr, "%02x", checksum[cnt]);
14943 putc ('\n', stderr);
14946 static void
14947 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14949 internal_error ("fold check: original tree changed by fold");
14952 static void
14953 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14954 hash_table <pointer_hash <tree_node> > ht)
14956 tree_node **slot;
14957 enum tree_code code;
14958 union tree_node buf;
14959 int i, len;
14961 recursive_label:
14962 if (expr == NULL)
14963 return;
14964 slot = ht.find_slot (expr, INSERT);
14965 if (*slot != NULL)
14966 return;
14967 *slot = CONST_CAST_TREE (expr);
14968 code = TREE_CODE (expr);
14969 if (TREE_CODE_CLASS (code) == tcc_declaration
14970 && DECL_ASSEMBLER_NAME_SET_P (expr))
14972 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14973 memcpy ((char *) &buf, expr, tree_size (expr));
14974 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14975 expr = (tree) &buf;
14977 else if (TREE_CODE_CLASS (code) == tcc_type
14978 && (TYPE_POINTER_TO (expr)
14979 || TYPE_REFERENCE_TO (expr)
14980 || TYPE_CACHED_VALUES_P (expr)
14981 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14982 || TYPE_NEXT_VARIANT (expr)))
14984 /* Allow these fields to be modified. */
14985 tree tmp;
14986 memcpy ((char *) &buf, expr, tree_size (expr));
14987 expr = tmp = (tree) &buf;
14988 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14989 TYPE_POINTER_TO (tmp) = NULL;
14990 TYPE_REFERENCE_TO (tmp) = NULL;
14991 TYPE_NEXT_VARIANT (tmp) = NULL;
14992 if (TYPE_CACHED_VALUES_P (tmp))
14994 TYPE_CACHED_VALUES_P (tmp) = 0;
14995 TYPE_CACHED_VALUES (tmp) = NULL;
14998 md5_process_bytes (expr, tree_size (expr), ctx);
14999 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
15000 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
15001 if (TREE_CODE_CLASS (code) != tcc_type
15002 && TREE_CODE_CLASS (code) != tcc_declaration
15003 && code != TREE_LIST
15004 && code != SSA_NAME
15005 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
15006 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
15007 switch (TREE_CODE_CLASS (code))
15009 case tcc_constant:
15010 switch (code)
15012 case STRING_CST:
15013 md5_process_bytes (TREE_STRING_POINTER (expr),
15014 TREE_STRING_LENGTH (expr), ctx);
15015 break;
15016 case COMPLEX_CST:
15017 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
15018 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
15019 break;
15020 case VECTOR_CST:
15021 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15022 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15023 break;
15024 default:
15025 break;
15027 break;
15028 case tcc_exceptional:
15029 switch (code)
15031 case TREE_LIST:
15032 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15033 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15034 expr = TREE_CHAIN (expr);
15035 goto recursive_label;
15036 break;
15037 case TREE_VEC:
15038 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15039 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15040 break;
15041 default:
15042 break;
15044 break;
15045 case tcc_expression:
15046 case tcc_reference:
15047 case tcc_comparison:
15048 case tcc_unary:
15049 case tcc_binary:
15050 case tcc_statement:
15051 case tcc_vl_exp:
15052 len = TREE_OPERAND_LENGTH (expr);
15053 for (i = 0; i < len; ++i)
15054 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15055 break;
15056 case tcc_declaration:
15057 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15058 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15059 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15061 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15062 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15063 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15064 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15065 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15067 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15068 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15070 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15072 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15073 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15074 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15076 break;
15077 case tcc_type:
15078 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15079 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15080 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15081 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15082 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15083 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15084 if (INTEGRAL_TYPE_P (expr)
15085 || SCALAR_FLOAT_TYPE_P (expr))
15087 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15088 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15090 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15091 if (TREE_CODE (expr) == RECORD_TYPE
15092 || TREE_CODE (expr) == UNION_TYPE
15093 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15094 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15095 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15096 break;
15097 default:
15098 break;
15102 /* Helper function for outputting the checksum of a tree T. When
15103 debugging with gdb, you can "define mynext" to be "next" followed
15104 by "call debug_fold_checksum (op0)", then just trace down till the
15105 outputs differ. */
15107 DEBUG_FUNCTION void
15108 debug_fold_checksum (const_tree t)
15110 int i;
15111 unsigned char checksum[16];
15112 struct md5_ctx ctx;
15113 hash_table <pointer_hash <tree_node> > ht;
15114 ht.create (32);
15116 md5_init_ctx (&ctx);
15117 fold_checksum_tree (t, &ctx, ht);
15118 md5_finish_ctx (&ctx, checksum);
15119 ht.empty ();
15121 for (i = 0; i < 16; i++)
15122 fprintf (stderr, "%d ", checksum[i]);
15124 fprintf (stderr, "\n");
15127 #endif
15129 /* Fold a unary tree expression with code CODE of type TYPE with an
15130 operand OP0. LOC is the location of the resulting expression.
15131 Return a folded expression if successful. Otherwise, return a tree
15132 expression with code CODE of type TYPE with an operand OP0. */
15134 tree
15135 fold_build1_stat_loc (location_t loc,
15136 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15138 tree tem;
15139 #ifdef ENABLE_FOLD_CHECKING
15140 unsigned char checksum_before[16], checksum_after[16];
15141 struct md5_ctx ctx;
15142 hash_table <pointer_hash <tree_node> > ht;
15144 ht.create (32);
15145 md5_init_ctx (&ctx);
15146 fold_checksum_tree (op0, &ctx, ht);
15147 md5_finish_ctx (&ctx, checksum_before);
15148 ht.empty ();
15149 #endif
15151 tem = fold_unary_loc (loc, code, type, op0);
15152 if (!tem)
15153 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15155 #ifdef ENABLE_FOLD_CHECKING
15156 md5_init_ctx (&ctx);
15157 fold_checksum_tree (op0, &ctx, ht);
15158 md5_finish_ctx (&ctx, checksum_after);
15159 ht.dispose ();
15161 if (memcmp (checksum_before, checksum_after, 16))
15162 fold_check_failed (op0, tem);
15163 #endif
15164 return tem;
15167 /* Fold a binary tree expression with code CODE of type TYPE with
15168 operands OP0 and OP1. LOC is the location of the resulting
15169 expression. Return a folded expression if successful. Otherwise,
15170 return a tree expression with code CODE of type TYPE with operands
15171 OP0 and OP1. */
15173 tree
15174 fold_build2_stat_loc (location_t loc,
15175 enum tree_code code, tree type, tree op0, tree op1
15176 MEM_STAT_DECL)
15178 tree tem;
15179 #ifdef ENABLE_FOLD_CHECKING
15180 unsigned char checksum_before_op0[16],
15181 checksum_before_op1[16],
15182 checksum_after_op0[16],
15183 checksum_after_op1[16];
15184 struct md5_ctx ctx;
15185 hash_table <pointer_hash <tree_node> > ht;
15187 ht.create (32);
15188 md5_init_ctx (&ctx);
15189 fold_checksum_tree (op0, &ctx, ht);
15190 md5_finish_ctx (&ctx, checksum_before_op0);
15191 ht.empty ();
15193 md5_init_ctx (&ctx);
15194 fold_checksum_tree (op1, &ctx, ht);
15195 md5_finish_ctx (&ctx, checksum_before_op1);
15196 ht.empty ();
15197 #endif
15199 tem = fold_binary_loc (loc, code, type, op0, op1);
15200 if (!tem)
15201 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15203 #ifdef ENABLE_FOLD_CHECKING
15204 md5_init_ctx (&ctx);
15205 fold_checksum_tree (op0, &ctx, ht);
15206 md5_finish_ctx (&ctx, checksum_after_op0);
15207 ht.empty ();
15209 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15210 fold_check_failed (op0, tem);
15212 md5_init_ctx (&ctx);
15213 fold_checksum_tree (op1, &ctx, ht);
15214 md5_finish_ctx (&ctx, checksum_after_op1);
15215 ht.dispose ();
15217 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15218 fold_check_failed (op1, tem);
15219 #endif
15220 return tem;
15223 /* Fold a ternary tree expression with code CODE of type TYPE with
15224 operands OP0, OP1, and OP2. Return a folded expression if
15225 successful. Otherwise, return a tree expression with code CODE of
15226 type TYPE with operands OP0, OP1, and OP2. */
15228 tree
15229 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15230 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15232 tree tem;
15233 #ifdef ENABLE_FOLD_CHECKING
15234 unsigned char checksum_before_op0[16],
15235 checksum_before_op1[16],
15236 checksum_before_op2[16],
15237 checksum_after_op0[16],
15238 checksum_after_op1[16],
15239 checksum_after_op2[16];
15240 struct md5_ctx ctx;
15241 hash_table <pointer_hash <tree_node> > ht;
15243 ht.create (32);
15244 md5_init_ctx (&ctx);
15245 fold_checksum_tree (op0, &ctx, ht);
15246 md5_finish_ctx (&ctx, checksum_before_op0);
15247 ht.empty ();
15249 md5_init_ctx (&ctx);
15250 fold_checksum_tree (op1, &ctx, ht);
15251 md5_finish_ctx (&ctx, checksum_before_op1);
15252 ht.empty ();
15254 md5_init_ctx (&ctx);
15255 fold_checksum_tree (op2, &ctx, ht);
15256 md5_finish_ctx (&ctx, checksum_before_op2);
15257 ht.empty ();
15258 #endif
15260 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15261 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15262 if (!tem)
15263 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15265 #ifdef ENABLE_FOLD_CHECKING
15266 md5_init_ctx (&ctx);
15267 fold_checksum_tree (op0, &ctx, ht);
15268 md5_finish_ctx (&ctx, checksum_after_op0);
15269 ht.empty ();
15271 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15272 fold_check_failed (op0, tem);
15274 md5_init_ctx (&ctx);
15275 fold_checksum_tree (op1, &ctx, ht);
15276 md5_finish_ctx (&ctx, checksum_after_op1);
15277 ht.empty ();
15279 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15280 fold_check_failed (op1, tem);
15282 md5_init_ctx (&ctx);
15283 fold_checksum_tree (op2, &ctx, ht);
15284 md5_finish_ctx (&ctx, checksum_after_op2);
15285 ht.dispose ();
15287 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15288 fold_check_failed (op2, tem);
15289 #endif
15290 return tem;
15293 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15294 arguments in ARGARRAY, and a null static chain.
15295 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15296 of type TYPE from the given operands as constructed by build_call_array. */
15298 tree
15299 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15300 int nargs, tree *argarray)
15302 tree tem;
15303 #ifdef ENABLE_FOLD_CHECKING
15304 unsigned char checksum_before_fn[16],
15305 checksum_before_arglist[16],
15306 checksum_after_fn[16],
15307 checksum_after_arglist[16];
15308 struct md5_ctx ctx;
15309 hash_table <pointer_hash <tree_node> > ht;
15310 int i;
15312 ht.create (32);
15313 md5_init_ctx (&ctx);
15314 fold_checksum_tree (fn, &ctx, ht);
15315 md5_finish_ctx (&ctx, checksum_before_fn);
15316 ht.empty ();
15318 md5_init_ctx (&ctx);
15319 for (i = 0; i < nargs; i++)
15320 fold_checksum_tree (argarray[i], &ctx, ht);
15321 md5_finish_ctx (&ctx, checksum_before_arglist);
15322 ht.empty ();
15323 #endif
15325 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15327 #ifdef ENABLE_FOLD_CHECKING
15328 md5_init_ctx (&ctx);
15329 fold_checksum_tree (fn, &ctx, ht);
15330 md5_finish_ctx (&ctx, checksum_after_fn);
15331 ht.empty ();
15333 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15334 fold_check_failed (fn, tem);
15336 md5_init_ctx (&ctx);
15337 for (i = 0; i < nargs; i++)
15338 fold_checksum_tree (argarray[i], &ctx, ht);
15339 md5_finish_ctx (&ctx, checksum_after_arglist);
15340 ht.dispose ();
15342 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15343 fold_check_failed (NULL_TREE, tem);
15344 #endif
15345 return tem;
15348 /* Perform constant folding and related simplification of initializer
15349 expression EXPR. These behave identically to "fold_buildN" but ignore
15350 potential run-time traps and exceptions that fold must preserve. */
15352 #define START_FOLD_INIT \
15353 int saved_signaling_nans = flag_signaling_nans;\
15354 int saved_trapping_math = flag_trapping_math;\
15355 int saved_rounding_math = flag_rounding_math;\
15356 int saved_trapv = flag_trapv;\
15357 int saved_folding_initializer = folding_initializer;\
15358 flag_signaling_nans = 0;\
15359 flag_trapping_math = 0;\
15360 flag_rounding_math = 0;\
15361 flag_trapv = 0;\
15362 folding_initializer = 1;
15364 #define END_FOLD_INIT \
15365 flag_signaling_nans = saved_signaling_nans;\
15366 flag_trapping_math = saved_trapping_math;\
15367 flag_rounding_math = saved_rounding_math;\
15368 flag_trapv = saved_trapv;\
15369 folding_initializer = saved_folding_initializer;
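/* E.g. a static initializer such as "static double d = 1.0 / 3.0;"
   must be evaluated at compile time regardless of -frounding-math or
   -ftrapping-math, so those flags are suspended around the fold call
   (illustrative use of the macros above).  */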
15371 tree
15372 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15373 tree type, tree op)
15375 tree result;
15376 START_FOLD_INIT;
15378 result = fold_build1_loc (loc, code, type, op);
15380 END_FOLD_INIT;
15381 return result;
15384 tree
15385 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15386 tree type, tree op0, tree op1)
15388 tree result;
15389 START_FOLD_INIT;
15391 result = fold_build2_loc (loc, code, type, op0, op1);
15393 END_FOLD_INIT;
15394 return result;
15397 tree
15398 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15399 int nargs, tree *argarray)
15401 tree result;
15402 START_FOLD_INIT;
15404 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15406 END_FOLD_INIT;
15407 return result;
15410 #undef START_FOLD_INIT
15411 #undef END_FOLD_INIT
15413 /* Determine whether the first argument is a multiple of the second. Return 0 if
15414 it is not, or if we cannot easily determine it to be.
15416 An example of the sort of thing we care about (at this point; this routine
15417 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15418 fold cases do now) is discovering that
15420 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15422 is a multiple of
15424 SAVE_EXPR (J * 8)
15426 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15428 This code also handles discovering that
15430 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15432 is a multiple of 8 so we don't have to worry about dealing with a
15433 possible remainder.
15435 Note that we *look* inside a SAVE_EXPR only to determine how it was
15436 calculated; it is not safe for fold to do much of anything else with the
15437 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15438 at run time. For example, the latter example above *cannot* be implemented
15439 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15440 evaluation time of the original SAVE_EXPR is not necessarily the same at
15441 the time the new expression is evaluated. The only optimization of this
15442 sort that would be valid is changing
15444 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15446 divided by 8 to
15448 SAVE_EXPR (I) * SAVE_EXPR (J)
15450 (where the same SAVE_EXPR (J) is used in the original and the
15451 transformed version). */
15453 int
15454 multiple_of_p (tree type, const_tree top, const_tree bottom)
15456 if (operand_equal_p (top, bottom, 0))
15457 return 1;
15459 if (TREE_CODE (type) != INTEGER_TYPE)
15460 return 0;
15462 switch (TREE_CODE (top))
15464 case BIT_AND_EXPR:
15465 /* Bitwise and provides a power of two multiple. If the mask is
15466 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15467 if (!integer_pow2p (bottom))
15468 return 0;
15469 /* FALLTHRU */
15471 case MULT_EXPR:
15472 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15473 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15475 case PLUS_EXPR:
15476 case MINUS_EXPR:
15477 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15478 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15480 case LSHIFT_EXPR:
15481 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15483 tree op1, t1;
15485 op1 = TREE_OPERAND (top, 1);
15486 /* const_binop may not detect overflow correctly,
15487 so check for it explicitly here. */
15488 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15489 > TREE_INT_CST_LOW (op1)
15490 && TREE_INT_CST_HIGH (op1) == 0
15491 && 0 != (t1 = fold_convert (type,
15492 const_binop (LSHIFT_EXPR,
15493 size_one_node,
15494 op1)))
15495 && !TREE_OVERFLOW (t1))
15496 return multiple_of_p (type, t1, bottom);
15498 return 0;
15500 case NOP_EXPR:
15501 /* Can't handle conversions from non-integral or wider integral type. */
15502 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15503 || (TYPE_PRECISION (type)
15504 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15505 return 0;
15507 /* ... fall through ... */
15509 case SAVE_EXPR:
15510 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15512 case COND_EXPR:
15513 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15514 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15516 case INTEGER_CST:
15517 if (TREE_CODE (bottom) != INTEGER_CST
15518 || integer_zerop (bottom)
15519 || (TYPE_UNSIGNED (type)
15520 && (tree_int_cst_sgn (top) < 0
15521 || tree_int_cst_sgn (bottom) < 0)))
15522 return 0;
15523 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15524 top, bottom));
15526 default:
15527 return 0;
15531 /* Return true if CODE or TYPE is known to be non-negative. */
15533 static bool
15534 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15536 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15537 && truth_value_p (code))
15538 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15539 have a signed:1 type (where the values are -1 and 0). */
15540 return true;
15541 return false;
15544 /* Return true if (CODE OP0) is known to be non-negative. If the return
15545 value is based on the assumption that signed overflow is undefined,
15546 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15547 *STRICT_OVERFLOW_P. */
15549 bool
15550 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15551 bool *strict_overflow_p)
15553 if (TYPE_UNSIGNED (type))
15554 return true;
15556 switch (code)
15558 case ABS_EXPR:
15559 /* We can't return 1 if flag_wrapv is set because
15560 ABS_EXPR<INT_MIN> = INT_MIN. */
15561 if (!INTEGRAL_TYPE_P (type))
15562 return true;
15563 if (TYPE_OVERFLOW_UNDEFINED (type))
15565 *strict_overflow_p = true;
15566 return true;
15568 break;
15570 case NON_LVALUE_EXPR:
15571 case FLOAT_EXPR:
15572 case FIX_TRUNC_EXPR:
15573 return tree_expr_nonnegative_warnv_p (op0,
15574 strict_overflow_p);
15576 case NOP_EXPR:
15578 tree inner_type = TREE_TYPE (op0);
15579 tree outer_type = type;
15581 if (TREE_CODE (outer_type) == REAL_TYPE)
15583 if (TREE_CODE (inner_type) == REAL_TYPE)
15584 return tree_expr_nonnegative_warnv_p (op0,
15585 strict_overflow_p);
15586 if (INTEGRAL_TYPE_P (inner_type))
15588 if (TYPE_UNSIGNED (inner_type))
15589 return true;
15590 return tree_expr_nonnegative_warnv_p (op0,
15591 strict_overflow_p);
15594 else if (INTEGRAL_TYPE_P (outer_type))
15596 if (TREE_CODE (inner_type) == REAL_TYPE)
15597 return tree_expr_nonnegative_warnv_p (op0,
15598 strict_overflow_p);
15599 if (INTEGRAL_TYPE_P (inner_type))
15600 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15601 && TYPE_UNSIGNED (inner_type);
15604 break;
15606 default:
15607 return tree_simple_nonnegative_warnv_p (code, type);
15610 /* We don't know the sign of the result, so be conservative and return false. */
15611 return false;
15614 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15615 value is based on the assumption that signed overflow is undefined,
15616 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15617 *STRICT_OVERFLOW_P. */
15619 bool
15620 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15621 tree op1, bool *strict_overflow_p)
15623 if (TYPE_UNSIGNED (type))
15624 return true;
15626 switch (code)
15628 case POINTER_PLUS_EXPR:
15629 case PLUS_EXPR:
15630 if (FLOAT_TYPE_P (type))
15631 return (tree_expr_nonnegative_warnv_p (op0,
15632 strict_overflow_p)
15633 && tree_expr_nonnegative_warnv_p (op1,
15634 strict_overflow_p));
15636 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15637 both unsigned and at least 2 bits shorter than the result. */
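/* E.g. 8-bit unsigned x and y zero-extended to 16 bits: x + y is at
   most 255 + 255 == 510, which needs 9 bits, and
   MAX (8, 8) + 1 == 9 < 16, so the 16-bit sign bit can never be set
   (illustrative).  */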
15638 if (TREE_CODE (type) == INTEGER_TYPE
15639 && TREE_CODE (op0) == NOP_EXPR
15640 && TREE_CODE (op1) == NOP_EXPR)
15642 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15643 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15644 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15645 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15647 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15648 TYPE_PRECISION (inner2)) + 1;
15649 return prec < TYPE_PRECISION (type);
15652 break;
15654 case MULT_EXPR:
15655 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15657 /* x * x is always non-negative for floating point x
15658 or without overflow. */
15659 if (operand_equal_p (op0, op1, 0)
15660 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15661 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15663 if (TYPE_OVERFLOW_UNDEFINED (type))
15664 *strict_overflow_p = true;
15665 return true;
15669 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15670 both unsigned and their total number of bits is smaller than the result's. */
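/* E.g. 8-bit unsigned x and y widened to 32 bits: the product is at
   most 255 * 255 == 65025, which fits in 8 + 8 == 16 bits, and
   16 < 32, so the 32-bit sign bit stays clear (illustrative).  */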
15671 if (TREE_CODE (type) == INTEGER_TYPE
15672 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15673 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15675 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15676 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15677 : TREE_TYPE (op0);
15678 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15679 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15680 : TREE_TYPE (op1);
15682 bool unsigned0 = TYPE_UNSIGNED (inner0);
15683 bool unsigned1 = TYPE_UNSIGNED (inner1);
15685 if (TREE_CODE (op0) == INTEGER_CST)
15686 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15688 if (TREE_CODE (op1) == INTEGER_CST)
15689 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15691 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15692 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15694 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15695 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15696 : TYPE_PRECISION (inner0);
15698 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15699 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15700 : TYPE_PRECISION (inner1);
15702 return precision0 + precision1 < TYPE_PRECISION (type);
15705 return false;
15707 case BIT_AND_EXPR:
15708 case MAX_EXPR:
15709 return (tree_expr_nonnegative_warnv_p (op0,
15710 strict_overflow_p)
15711 || tree_expr_nonnegative_warnv_p (op1,
15712 strict_overflow_p));
15714 case BIT_IOR_EXPR:
15715 case BIT_XOR_EXPR:
15716 case MIN_EXPR:
15717 case RDIV_EXPR:
15718 case TRUNC_DIV_EXPR:
15719 case CEIL_DIV_EXPR:
15720 case FLOOR_DIV_EXPR:
15721 case ROUND_DIV_EXPR:
15722 return (tree_expr_nonnegative_warnv_p (op0,
15723 strict_overflow_p)
15724 && tree_expr_nonnegative_warnv_p (op1,
15725 strict_overflow_p));
15727 case TRUNC_MOD_EXPR:
15728 case CEIL_MOD_EXPR:
15729 case FLOOR_MOD_EXPR:
15730 case ROUND_MOD_EXPR:
15731 return tree_expr_nonnegative_warnv_p (op0,
15732 strict_overflow_p);
15733 default:
15734 return tree_simple_nonnegative_warnv_p (code, type);
15737 /* We don't know the sign of `t', so be conservative and return false. */
15738 return false;
15741 /* Return true if T is known to be non-negative. If the return
15742 value is based on the assumption that signed overflow is undefined,
15743 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15744 *STRICT_OVERFLOW_P. */
15746 bool
15747 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15749 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15750 return true;
15752 switch (TREE_CODE (t))
15754 case INTEGER_CST:
15755 return tree_int_cst_sgn (t) >= 0;
15757 case REAL_CST:
15758 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15760 case FIXED_CST:
15761 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15763 case COND_EXPR:
15764 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15765 strict_overflow_p)
15766 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15767 strict_overflow_p));
15768 default:
15769 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15770 TREE_TYPE (t));
15772 /* We don't know the sign of `t', so be conservative and return false. */
15773 return false;
15776 /* Return true if T is known to be non-negative. If the return
15777 value is based on the assumption that signed overflow is undefined,
15778 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15779 *STRICT_OVERFLOW_P. */
15781 bool
15782 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15783 tree arg0, tree arg1, bool *strict_overflow_p)
15785 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15786 switch (DECL_FUNCTION_CODE (fndecl))
15788 CASE_FLT_FN (BUILT_IN_ACOS):
15789 CASE_FLT_FN (BUILT_IN_ACOSH):
15790 CASE_FLT_FN (BUILT_IN_CABS):
15791 CASE_FLT_FN (BUILT_IN_COSH):
15792 CASE_FLT_FN (BUILT_IN_ERFC):
15793 CASE_FLT_FN (BUILT_IN_EXP):
15794 CASE_FLT_FN (BUILT_IN_EXP10):
15795 CASE_FLT_FN (BUILT_IN_EXP2):
15796 CASE_FLT_FN (BUILT_IN_FABS):
15797 CASE_FLT_FN (BUILT_IN_FDIM):
15798 CASE_FLT_FN (BUILT_IN_HYPOT):
15799 CASE_FLT_FN (BUILT_IN_POW10):
15800 CASE_INT_FN (BUILT_IN_FFS):
15801 CASE_INT_FN (BUILT_IN_PARITY):
15802 CASE_INT_FN (BUILT_IN_POPCOUNT):
15803 CASE_INT_FN (BUILT_IN_CLZ):
15804 CASE_INT_FN (BUILT_IN_CLRSB):
15805 case BUILT_IN_BSWAP32:
15806 case BUILT_IN_BSWAP64:
15807 /* Always true. */
15808 return true;
15810 CASE_FLT_FN (BUILT_IN_SQRT):
15811 /* sqrt(-0.0) is -0.0. */
15812 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15813 return true;
15814 return tree_expr_nonnegative_warnv_p (arg0,
15815 strict_overflow_p);
15817 CASE_FLT_FN (BUILT_IN_ASINH):
15818 CASE_FLT_FN (BUILT_IN_ATAN):
15819 CASE_FLT_FN (BUILT_IN_ATANH):
15820 CASE_FLT_FN (BUILT_IN_CBRT):
15821 CASE_FLT_FN (BUILT_IN_CEIL):
15822 CASE_FLT_FN (BUILT_IN_ERF):
15823 CASE_FLT_FN (BUILT_IN_EXPM1):
15824 CASE_FLT_FN (BUILT_IN_FLOOR):
15825 CASE_FLT_FN (BUILT_IN_FMOD):
15826 CASE_FLT_FN (BUILT_IN_FREXP):
15827 CASE_FLT_FN (BUILT_IN_ICEIL):
15828 CASE_FLT_FN (BUILT_IN_IFLOOR):
15829 CASE_FLT_FN (BUILT_IN_IRINT):
15830 CASE_FLT_FN (BUILT_IN_IROUND):
15831 CASE_FLT_FN (BUILT_IN_LCEIL):
15832 CASE_FLT_FN (BUILT_IN_LDEXP):
15833 CASE_FLT_FN (BUILT_IN_LFLOOR):
15834 CASE_FLT_FN (BUILT_IN_LLCEIL):
15835 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15836 CASE_FLT_FN (BUILT_IN_LLRINT):
15837 CASE_FLT_FN (BUILT_IN_LLROUND):
15838 CASE_FLT_FN (BUILT_IN_LRINT):
15839 CASE_FLT_FN (BUILT_IN_LROUND):
15840 CASE_FLT_FN (BUILT_IN_MODF):
15841 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15842 CASE_FLT_FN (BUILT_IN_RINT):
15843 CASE_FLT_FN (BUILT_IN_ROUND):
15844 CASE_FLT_FN (BUILT_IN_SCALB):
15845 CASE_FLT_FN (BUILT_IN_SCALBLN):
15846 CASE_FLT_FN (BUILT_IN_SCALBN):
15847 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15848 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15849 CASE_FLT_FN (BUILT_IN_SINH):
15850 CASE_FLT_FN (BUILT_IN_TANH):
15851 CASE_FLT_FN (BUILT_IN_TRUNC):
15852 /* True if the 1st argument is nonnegative. */
15853 return tree_expr_nonnegative_warnv_p (arg0,
15854 strict_overflow_p);
15856 CASE_FLT_FN (BUILT_IN_FMAX):
15857 /* True if the 1st or the 2nd argument is nonnegative. */
15858 return (tree_expr_nonnegative_warnv_p (arg0,
15859 strict_overflow_p)
15860 || (tree_expr_nonnegative_warnv_p (arg1,
15861 strict_overflow_p)));
15863 CASE_FLT_FN (BUILT_IN_FMIN):
15864 /* True if the 1st AND 2nd arguments are nonnegative. */
15865 return (tree_expr_nonnegative_warnv_p (arg0,
15866 strict_overflow_p)
15867 && (tree_expr_nonnegative_warnv_p (arg1,
15868 strict_overflow_p)));
15870 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15871 /* True if the 2nd argument is nonnegative. */
15872 return tree_expr_nonnegative_warnv_p (arg1,
15873 strict_overflow_p);
15875 CASE_FLT_FN (BUILT_IN_POWI):
15876 /* True if the 1st argument is nonnegative or the second
15877 argument is an even integer. */
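/* Illustrative example (not part of the original source):
powi (x, 4) == (x*x) * (x*x), and a square is never negative,
so any even exponent makes the sign of x irrelevant. */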
15878 if (TREE_CODE (arg1) == INTEGER_CST
15879 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15880 return true;
15881 return tree_expr_nonnegative_warnv_p (arg0,
15882 strict_overflow_p);
15884 CASE_FLT_FN (BUILT_IN_POW):
15885 /* True if the 1st argument is nonnegative or the second
15886 argument is an even integer valued real. */
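/* Illustrative note (not part of the original source): the exponent
must be *integer-valued*; pow (x, 2.0) is nonnegative for every x,
but pow (x, 2.5) is a domain error (NaN) for negative x, which is why
the code below checks the constant with real_identical against its
rounded integer value. */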
15887 if (TREE_CODE (arg1) == REAL_CST)
15889 REAL_VALUE_TYPE c;
15890 HOST_WIDE_INT n;
15892 c = TREE_REAL_CST (arg1);
15893 n = real_to_integer (&c);
15894 if ((n & 1) == 0)
15896 REAL_VALUE_TYPE cint;
15897 real_from_integer (&cint, VOIDmode, n,
15898 n < 0 ? -1 : 0, 0);
15899 if (real_identical (&c, &cint))
15900 return true;
15903 return tree_expr_nonnegative_warnv_p (arg0,
15904 strict_overflow_p);
15906 default:
15907 break;
15909 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15910 type);
15913 /* Return true if T is known to be non-negative. If the return
15914 value is based on the assumption that signed overflow is undefined,
15915 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15916 *STRICT_OVERFLOW_P. */
15918 static bool
15919 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15921 enum tree_code code = TREE_CODE (t);
15922 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15923 return true;
15925 switch (code)
15927 case TARGET_EXPR:
15929 tree temp = TARGET_EXPR_SLOT (t);
15930 t = TARGET_EXPR_INITIAL (t);
15932 /* If the initializer is non-void, then it's a normal expression
15933 that will be assigned to the slot. */
15934 if (!VOID_TYPE_P (t))
15935 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15937 /* Otherwise, the initializer sets the slot in some way. One common
15938 way is an assignment statement at the end of the initializer. */
15939 while (1)
15941 if (TREE_CODE (t) == BIND_EXPR)
15942 t = expr_last (BIND_EXPR_BODY (t));
15943 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15944 || TREE_CODE (t) == TRY_CATCH_EXPR)
15945 t = expr_last (TREE_OPERAND (t, 0));
15946 else if (TREE_CODE (t) == STATEMENT_LIST)
15947 t = expr_last (t);
15948 else
15949 break;
15951 if (TREE_CODE (t) == MODIFY_EXPR
15952 && TREE_OPERAND (t, 0) == temp)
15953 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15954 strict_overflow_p);
15956 return false;
15959 case CALL_EXPR:
15961 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15962 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15964 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15965 get_callee_fndecl (t),
15966 arg0,
15967 arg1,
15968 strict_overflow_p);
15970 case COMPOUND_EXPR:
15971 case MODIFY_EXPR:
15972 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15973 strict_overflow_p);
15974 case BIND_EXPR:
15975 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15976 strict_overflow_p);
15977 case SAVE_EXPR:
15978 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15979 strict_overflow_p);
15981 default:
15982 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15983 TREE_TYPE (t));
15986 /* We don't know the sign of `t', so be conservative and return false. */
15987 return false;
15990 /* Return true if T is known to be non-negative. If the return
15991 value is based on the assumption that signed overflow is undefined,
15992 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15993 *STRICT_OVERFLOW_P. */
15995 bool
15996 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15998 enum tree_code code;
15999 if (t == error_mark_node)
16000 return false;
16002 code = TREE_CODE (t);
16003 switch (TREE_CODE_CLASS (code))
16005 case tcc_binary:
16006 case tcc_comparison:
16007 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16008 TREE_TYPE (t),
16009 TREE_OPERAND (t, 0),
16010 TREE_OPERAND (t, 1),
16011 strict_overflow_p);
16013 case tcc_unary:
16014 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16015 TREE_TYPE (t),
16016 TREE_OPERAND (t, 0),
16017 strict_overflow_p);
16019 case tcc_constant:
16020 case tcc_declaration:
16021 case tcc_reference:
16022 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16024 default:
16025 break;
16028 switch (code)
16030 case TRUTH_AND_EXPR:
16031 case TRUTH_OR_EXPR:
16032 case TRUTH_XOR_EXPR:
16033 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16034 TREE_TYPE (t),
16035 TREE_OPERAND (t, 0),
16036 TREE_OPERAND (t, 1),
16037 strict_overflow_p);
16038 case TRUTH_NOT_EXPR:
16039 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16040 TREE_TYPE (t),
16041 TREE_OPERAND (t, 0),
16042 strict_overflow_p);
16044 case COND_EXPR:
16045 case CONSTRUCTOR:
16046 case OBJ_TYPE_REF:
16047 case ASSERT_EXPR:
16048 case ADDR_EXPR:
16049 case WITH_SIZE_EXPR:
16050 case SSA_NAME:
16051 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16053 default:
16054 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16058 /* Return true if `t' is known to be non-negative. Handle warnings
16059 about undefined signed overflow. */
16061 bool
16062 tree_expr_nonnegative_p (tree t)
16064 bool ret, strict_overflow_p;
16066 strict_overflow_p = false;
16067 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16068 if (strict_overflow_p)
16069 fold_overflow_warning (("assuming signed overflow does not occur when "
16070 "determining that expression is always "
16071 "non-negative"),
16072 WARN_STRICT_OVERFLOW_MISC);
16073 return ret;
16077 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16078 For floating point we further ensure that T is not denormal.
16079 Similar logic is present in nonzero_address in rtlanal.h.
16081 If the return value is based on the assumption that signed overflow
16082 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16083 change *STRICT_OVERFLOW_P. */
16085 bool
16086 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16087 bool *strict_overflow_p)
16089 switch (code)
16091 case ABS_EXPR:
16092 return tree_expr_nonzero_warnv_p (op0,
16093 strict_overflow_p);
16095 case NOP_EXPR:
16097 tree inner_type = TREE_TYPE (op0);
16098 tree outer_type = type;
16100 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16101 && tree_expr_nonzero_warnv_p (op0,
16102 strict_overflow_p));
16104 break;
16106 case NON_LVALUE_EXPR:
16107 return tree_expr_nonzero_warnv_p (op0,
16108 strict_overflow_p);
16110 default:
16111 break;
16114 return false;
16117 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16118 For floating point we further ensure that T is not denormal.
16119 Similar logic is present in nonzero_address in rtlanal.h.
16121 If the return value is based on the assumption that signed overflow
16122 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16123 change *STRICT_OVERFLOW_P. */
16125 bool
16126 tree_binary_nonzero_warnv_p (enum tree_code code,
16127 tree type,
16128 tree op0,
16129 tree op1, bool *strict_overflow_p)
16131 bool sub_strict_overflow_p;
16132 switch (code)
16134 case POINTER_PLUS_EXPR:
16135 case PLUS_EXPR:
16136 if (TYPE_OVERFLOW_UNDEFINED (type))
16138 /* In the presence of negative values it is hard
16139 to say anything. */
16140 sub_strict_overflow_p = false;
16141 if (!tree_expr_nonnegative_warnv_p (op0,
16142 &sub_strict_overflow_p)
16143 || !tree_expr_nonnegative_warnv_p (op1,
16144 &sub_strict_overflow_p))
16145 return false;
16146 /* One of the operands must be positive and the other non-negative. */
16147 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16148 overflows, on a twos-complement machine the sum of two
16149 nonnegative numbers can never be zero. */
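/* Illustrative example (not part of the original source): in 32-bit
two's complement, INT_MAX + 1 wraps to INT_MIN, which is still
nonzero; two nonnegative operands can only sum to zero modulo 2^32
when both are zero. */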
16150 return (tree_expr_nonzero_warnv_p (op0,
16151 strict_overflow_p)
16152 || tree_expr_nonzero_warnv_p (op1,
16153 strict_overflow_p));
16155 break;
16157 case MULT_EXPR:
16158 if (TYPE_OVERFLOW_UNDEFINED (type))
16160 if (tree_expr_nonzero_warnv_p (op0,
16161 strict_overflow_p)
16162 && tree_expr_nonzero_warnv_p (op1,
16163 strict_overflow_p))
16165 *strict_overflow_p = true;
16166 return true;
16169 break;
16171 case MIN_EXPR:
16172 sub_strict_overflow_p = false;
16173 if (tree_expr_nonzero_warnv_p (op0,
16174 &sub_strict_overflow_p)
16175 && tree_expr_nonzero_warnv_p (op1,
16176 &sub_strict_overflow_p))
16178 if (sub_strict_overflow_p)
16179 *strict_overflow_p = true;
16181 break;
16183 case MAX_EXPR:
16184 sub_strict_overflow_p = false;
16185 if (tree_expr_nonzero_warnv_p (op0,
16186 &sub_strict_overflow_p))
16188 if (sub_strict_overflow_p)
16189 *strict_overflow_p = true;
16191 /* When both operands are nonzero, then MAX must be too. */
16192 if (tree_expr_nonzero_warnv_p (op1,
16193 strict_overflow_p))
16194 return true;
16196 /* MAX where operand 0 is positive is positive. */
16197 return tree_expr_nonnegative_warnv_p (op0,
16198 strict_overflow_p);
16200 /* MAX where operand 1 is positive is positive. */
16201 else if (tree_expr_nonzero_warnv_p (op1,
16202 &sub_strict_overflow_p)
16203 && tree_expr_nonnegative_warnv_p (op1,
16204 &sub_strict_overflow_p))
16206 if (sub_strict_overflow_p)
16207 *strict_overflow_p = true;
16208 return true;
16210 break;
16212 case BIT_IOR_EXPR:
16213 return (tree_expr_nonzero_warnv_p (op1,
16214 strict_overflow_p)
16215 || tree_expr_nonzero_warnv_p (op0,
16216 strict_overflow_p));
16218 default:
16219 break;
16222 return false;
16225 /* Return true when T is an address and is known to be nonzero.
16226 For floating point we further ensure that T is not denormal.
16227 Similar logic is present in nonzero_address in rtlanal.h.
16229 If the return value is based on the assumption that signed overflow
16230 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16231 change *STRICT_OVERFLOW_P. */
16233 bool
16234 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16236 bool sub_strict_overflow_p;
16237 switch (TREE_CODE (t))
16239 case INTEGER_CST:
16240 return !integer_zerop (t);
16242 case ADDR_EXPR:
16244 tree base = TREE_OPERAND (t, 0);
16245 if (!DECL_P (base))
16246 base = get_base_address (base);
16248 if (!base)
16249 return false;
16251 /* Weak declarations may link to NULL. Other things may also be NULL
16252 so protect with -fdelete-null-pointer-checks; but not variables
16253 allocated on the stack. */
16254 if (DECL_P (base)
16255 && (flag_delete_null_pointer_checks
16256 || (DECL_CONTEXT (base)
16257 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16258 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16259 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16261 /* Constants are never weak. */
16262 if (CONSTANT_CLASS_P (base))
16263 return true;
16265 return false;
16268 case COND_EXPR:
16269 sub_strict_overflow_p = false;
16270 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16271 &sub_strict_overflow_p)
16272 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16273 &sub_strict_overflow_p))
16275 if (sub_strict_overflow_p)
16276 *strict_overflow_p = true;
16277 return true;
16279 break;
16281 default:
16282 break;
16284 return false;
16287 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16288 attempt to fold the expression to a constant without modifying TYPE,
16289 OP0 or OP1.
16291 If the expression could be simplified to a constant, then return
16292 the constant. If the expression would not be simplified to a
16293 constant, then return NULL_TREE. */
16295 tree
16296 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16298 tree tem = fold_binary (code, type, op0, op1);
16299 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16302 /* Given the components of a unary expression CODE, TYPE and OP0,
16303 attempt to fold the expression to a constant without modifying
16304 TYPE or OP0.
16306 If the expression could be simplified to a constant, then return
16307 the constant. If the expression would not be simplified to a
16308 constant, then return NULL_TREE. */
16310 tree
16311 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16313 tree tem = fold_unary (code, type, op0);
16314 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16317 /* If EXP represents referencing an element in a constant string
16318 (either via pointer arithmetic or array indexing), return the
16319 tree representing the value accessed, otherwise return NULL. */
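/* For example (illustrative, not part of the original source), this
lets "abc"[1] and *("abc" + 1) fold to the character constant 'b'. */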
16321 tree
16322 fold_read_from_constant_string (tree exp)
16324 if ((TREE_CODE (exp) == INDIRECT_REF
16325 || TREE_CODE (exp) == ARRAY_REF)
16326 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16328 tree exp1 = TREE_OPERAND (exp, 0);
16329 tree index;
16330 tree string;
16331 location_t loc = EXPR_LOCATION (exp);
16333 if (TREE_CODE (exp) == INDIRECT_REF)
16334 string = string_constant (exp1, &index);
16335 else
16337 tree low_bound = array_ref_low_bound (exp);
16338 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16340 /* Optimize the special case of a zero lower bound.
16342 We convert the low_bound to sizetype to avoid some problems
16343 with constant folding. (E.g. suppose the lower bound is 1,
16344 and its mode is QI. Without the conversion, (ARRAY
16345 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16346 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16347 if (! integer_zerop (low_bound))
16348 index = size_diffop_loc (loc, index,
16349 fold_convert_loc (loc, sizetype, low_bound));
16351 string = exp1;
16354 if (string
16355 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16356 && TREE_CODE (string) == STRING_CST
16357 && TREE_CODE (index) == INTEGER_CST
16358 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16359 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16360 == MODE_INT)
16361 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16362 return build_int_cst_type (TREE_TYPE (exp),
16363 (TREE_STRING_POINTER (string)
16364 [TREE_INT_CST_LOW (index)]));
16366 return NULL;
16369 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16370 an integer constant, real, or fixed-point constant.
16372 TYPE is the type of the result. */
16374 static tree
16375 fold_negate_const (tree arg0, tree type)
16377 tree t = NULL_TREE;
16379 switch (TREE_CODE (arg0))
16381 case INTEGER_CST:
16383 double_int val = tree_to_double_int (arg0);
16384 bool overflow;
16385 val = val.neg_with_overflow (&overflow);
16386 t = force_fit_type_double (type, val, 1,
16387 (overflow | TREE_OVERFLOW (arg0))
16388 && !TYPE_UNSIGNED (type));
16389 break;
16392 case REAL_CST:
16393 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16394 break;
16396 case FIXED_CST:
16398 FIXED_VALUE_TYPE f;
16399 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16400 &(TREE_FIXED_CST (arg0)), NULL,
16401 TYPE_SATURATING (type));
16402 t = build_fixed (type, f);
16403 /* Propagate overflow flags. */
16404 if (overflow_p | TREE_OVERFLOW (arg0))
16405 TREE_OVERFLOW (t) = 1;
16406 break;
16409 default:
16410 gcc_unreachable ();
16413 return t;
16416 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16417 an integer constant or real constant.
16419 TYPE is the type of the result. */
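/* Illustrative note (not part of the original source): in two's
complement, abs (INT_MIN) does not fit in the type; the INTEGER_CST
case below records that by setting TREE_OVERFLOW on the result. */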
16421 tree
16422 fold_abs_const (tree arg0, tree type)
16424 tree t = NULL_TREE;
16426 switch (TREE_CODE (arg0))
16428 case INTEGER_CST:
16430 double_int val = tree_to_double_int (arg0);
16432 /* If the value is unsigned or non-negative, then the absolute value
16433 is the same as the ordinary value. */
16434 if (TYPE_UNSIGNED (type)
16435 || !val.is_negative ())
16436 t = arg0;
16438 /* If the value is negative, then the absolute value is
16439 its negation. */
16440 else
16442 bool overflow;
16443 val = val.neg_with_overflow (&overflow);
16444 t = force_fit_type_double (type, val, -1,
16445 overflow | TREE_OVERFLOW (arg0));
16448 break;
16450 case REAL_CST:
16451 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16452 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16453 else
16454 t = arg0;
16455 break;
16457 default:
16458 gcc_unreachable ();
16461 return t;
16464 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16465 constant. TYPE is the type of the result. */
16467 static tree
16468 fold_not_const (const_tree arg0, tree type)
16470 double_int val;
16472 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16474 val = ~tree_to_double_int (arg0);
16475 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16478 /* Given CODE, a relational operator, the target type, TYPE and two
16479 constant operands OP0 and OP1, return the result of the
16480 relational operation. If the result is not a compile time
16481 constant, then return NULL_TREE. */
16483 static tree
16484 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16486 int result, invert;
16488 /* From here on, the only cases we handle are when the result is
16489 known to be a constant. */
16491 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16493 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16494 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16496 /* Handle the cases where either operand is a NaN. */
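/* Illustrative note (not part of the original source): a NaN compares
unordered with everything, itself included, so EQ and ORDERED yield
false while NE, UNORDERED and the UN* comparisons yield true; the
signaling comparisons (LT, LE, GT, GE, LTGT) may trap, so they are
not folded when -ftrapping-math is in effect. */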
16497 if (real_isnan (c0) || real_isnan (c1))
16499 switch (code)
16501 case EQ_EXPR:
16502 case ORDERED_EXPR:
16503 result = 0;
16504 break;
16506 case NE_EXPR:
16507 case UNORDERED_EXPR:
16508 case UNLT_EXPR:
16509 case UNLE_EXPR:
16510 case UNGT_EXPR:
16511 case UNGE_EXPR:
16512 case UNEQ_EXPR:
16513 result = 1;
16514 break;
16516 case LT_EXPR:
16517 case LE_EXPR:
16518 case GT_EXPR:
16519 case GE_EXPR:
16520 case LTGT_EXPR:
16521 if (flag_trapping_math)
16522 return NULL_TREE;
16523 result = 0;
16524 break;
16526 default:
16527 gcc_unreachable ();
16530 return constant_boolean_node (result, type);
16533 return constant_boolean_node (real_compare (code, c0, c1), type);
16536 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16538 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16539 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16540 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16543 /* Handle equality/inequality of complex constants. */
16544 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16546 tree rcond = fold_relational_const (code, type,
16547 TREE_REALPART (op0),
16548 TREE_REALPART (op1));
16549 tree icond = fold_relational_const (code, type,
16550 TREE_IMAGPART (op0),
16551 TREE_IMAGPART (op1));
16552 if (code == EQ_EXPR)
16553 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16554 else if (code == NE_EXPR)
16555 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16556 else
16557 return NULL_TREE;
16560 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16562 unsigned count = VECTOR_CST_NELTS (op0);
16563 tree *elts = XALLOCAVEC (tree, count);
16564 gcc_assert (VECTOR_CST_NELTS (op1) == count
16565 && TYPE_VECTOR_SUBPARTS (type) == count);
16567 for (unsigned i = 0; i < count; i++)
16569 tree elem_type = TREE_TYPE (type);
16570 tree elem0 = VECTOR_CST_ELT (op0, i);
16571 tree elem1 = VECTOR_CST_ELT (op1, i);
16573 tree tem = fold_relational_const (code, elem_type,
16574 elem0, elem1);
16576 if (tem == NULL_TREE)
16577 return NULL_TREE;
16579 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16582 return build_vector (type, elts);
16585 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16587 To compute GT, swap the arguments and do LT.
16588 To compute GE, do LT and invert the result.
16589 To compute LE, swap the arguments, do LT and invert the result.
16590 To compute NE, do EQ and invert the result.
16592 Therefore, the code below must handle only EQ and LT. */
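/* Illustrative example (not part of the original source):
2 <= 7 is evaluated by swapping the operands and computing 7 < 2,
then inverting the result: !(7 < 2) == true. */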
16594 if (code == LE_EXPR || code == GT_EXPR)
16596 tree tem = op0;
16597 op0 = op1;
16598 op1 = tem;
16599 code = swap_tree_comparison (code);
16602 /* Note that it is safe to invert for real values here because we
16603 have already handled the one case where it matters. */
16605 invert = 0;
16606 if (code == NE_EXPR || code == GE_EXPR)
16608 invert = 1;
16609 code = invert_tree_comparison (code, false);
16612 /* Compute a result for LT or EQ if args permit;
16613 otherwise return NULL_TREE. */
16614 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16616 if (code == EQ_EXPR)
16617 result = tree_int_cst_equal (op0, op1);
16618 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16619 result = INT_CST_LT_UNSIGNED (op0, op1);
16620 else
16621 result = INT_CST_LT (op0, op1);
16623 else
16624 return NULL_TREE;
16626 if (invert)
16627 result ^= 1;
16628 return constant_boolean_node (result, type);
16631 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16632 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16633 itself. */
16635 tree
16636 fold_build_cleanup_point_expr (tree type, tree expr)
16638 /* If the expression does not have side effects then we don't have to wrap
16639 it with a cleanup point expression. */
16640 if (!TREE_SIDE_EFFECTS (expr))
16641 return expr;
16643 /* If the expression is a return, check whether the expression inside the
16644 return, or the right-hand side of the modify expression inside the
16645 return, has no side effects. If either has none, we don't need to
16646 wrap the expression in a cleanup point expression. Note we don't check the
16647 left-hand side of the modify because it should always be a return decl. */
16648 if (TREE_CODE (expr) == RETURN_EXPR)
16650 tree op = TREE_OPERAND (expr, 0);
16651 if (!op || !TREE_SIDE_EFFECTS (op))
16652 return expr;
16653 op = TREE_OPERAND (op, 1);
16654 if (!TREE_SIDE_EFFECTS (op))
16655 return expr;
16658 return build1 (CLEANUP_POINT_EXPR, type, expr);
16661 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16662 of an indirection through OP0, or NULL_TREE if no simplification is
16663 possible. */
16665 tree
16666 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16668 tree sub = op0;
16669 tree subtype;
16671 STRIP_NOPS (sub);
16672 subtype = TREE_TYPE (sub);
16673 if (!POINTER_TYPE_P (subtype))
16674 return NULL_TREE;
16676 if (TREE_CODE (sub) == ADDR_EXPR)
16678 tree op = TREE_OPERAND (sub, 0);
16679 tree optype = TREE_TYPE (op);
16680 /* *&CONST_DECL -> to the value of the const decl. */
16681 if (TREE_CODE (op) == CONST_DECL)
16682 return DECL_INITIAL (op);
16683 /* *&p => p; make sure to handle *&"str"[cst] here. */
16684 if (type == optype)
16686 tree fop = fold_read_from_constant_string (op);
16687 if (fop)
16688 return fop;
16689 else
16690 return op;
16692 /* *(foo *)&fooarray => fooarray[0] */
16693 else if (TREE_CODE (optype) == ARRAY_TYPE
16694 && type == TREE_TYPE (optype)
16695 && (!in_gimple_form
16696 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16698 tree type_domain = TYPE_DOMAIN (optype);
16699 tree min_val = size_zero_node;
16700 if (type_domain && TYPE_MIN_VALUE (type_domain))
16701 min_val = TYPE_MIN_VALUE (type_domain);
16702 if (in_gimple_form
16703 && TREE_CODE (min_val) != INTEGER_CST)
16704 return NULL_TREE;
16705 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16706 NULL_TREE, NULL_TREE);
16708 /* *(foo *)&complexfoo => __real__ complexfoo */
16709 else if (TREE_CODE (optype) == COMPLEX_TYPE
16710 && type == TREE_TYPE (optype))
16711 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16712 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16713 else if (TREE_CODE (optype) == VECTOR_TYPE
16714 && type == TREE_TYPE (optype))
16716 tree part_width = TYPE_SIZE (type);
16717 tree index = bitsize_int (0);
16718 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16722 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16723 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16725 tree op00 = TREE_OPERAND (sub, 0);
16726 tree op01 = TREE_OPERAND (sub, 1);
16728 STRIP_NOPS (op00);
16729 if (TREE_CODE (op00) == ADDR_EXPR)
16731 tree op00type;
16732 op00 = TREE_OPERAND (op00, 0);
16733 op00type = TREE_TYPE (op00);
16735 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16736 if (TREE_CODE (op00type) == VECTOR_TYPE
16737 && type == TREE_TYPE (op00type))
16739 HOST_WIDE_INT offset = tree_to_shwi (op01);
16740 tree part_width = TYPE_SIZE (type);
16741 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16742 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16743 tree index = bitsize_int (indexi);
16745 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16746 return fold_build3_loc (loc,
16747 BIT_FIELD_REF, type, op00,
16748 part_width, index);
16751 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16752 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16753 && type == TREE_TYPE (op00type))
16755 tree size = TYPE_SIZE_UNIT (type);
16756 if (tree_int_cst_equal (size, op01))
16757 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16759 /* ((foo *)&fooarray)[1] => fooarray[1] */
16760 else if (TREE_CODE (op00type) == ARRAY_TYPE
16761 && type == TREE_TYPE (op00type))
16763 tree type_domain = TYPE_DOMAIN (op00type);
16764 tree min_val = size_zero_node;
16765 if (type_domain && TYPE_MIN_VALUE (type_domain))
16766 min_val = TYPE_MIN_VALUE (type_domain);
16767 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16768 TYPE_SIZE_UNIT (type));
16769 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16770 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16771 NULL_TREE, NULL_TREE);
16776 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16777 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16778 && type == TREE_TYPE (TREE_TYPE (subtype))
16779 && (!in_gimple_form
16780 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16782 tree type_domain;
16783 tree min_val = size_zero_node;
16784 sub = build_fold_indirect_ref_loc (loc, sub);
16785 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16786 if (type_domain && TYPE_MIN_VALUE (type_domain))
16787 min_val = TYPE_MIN_VALUE (type_domain);
16788 if (in_gimple_form
16789 && TREE_CODE (min_val) != INTEGER_CST)
16790 return NULL_TREE;
16791 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16792 NULL_TREE);
16795 return NULL_TREE;
16798 /* Builds an expression for an indirection through T, simplifying some
16799 cases. */
16801 tree
16802 build_fold_indirect_ref_loc (location_t loc, tree t)
16804 tree type = TREE_TYPE (TREE_TYPE (t));
16805 tree sub = fold_indirect_ref_1 (loc, type, t);
16807 if (sub)
16808 return sub;
16810 return build1_loc (loc, INDIRECT_REF, type, t);
16813 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16815 tree
16816 fold_indirect_ref_loc (location_t loc, tree t)
16818 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16820 if (sub)
16821 return sub;
16822 else
16823 return t;
16826 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16827 whose result is ignored. The type of the returned tree need not be
16828 the same as the original expression. */
16830 tree
16831 fold_ignored_result (tree t)
16833 if (!TREE_SIDE_EFFECTS (t))
16834 return integer_zero_node;
16836 for (;;)
16837 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16839 case tcc_unary:
16840 t = TREE_OPERAND (t, 0);
16841 break;
16843 case tcc_binary:
16844 case tcc_comparison:
16845 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16846 t = TREE_OPERAND (t, 0);
16847 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16848 t = TREE_OPERAND (t, 1);
16849 else
16850 return t;
16851 break;
16853 case tcc_expression:
16854 switch (TREE_CODE (t))
16856 case COMPOUND_EXPR:
16857 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16858 return t;
16859 t = TREE_OPERAND (t, 0);
16860 break;
16862 case COND_EXPR:
16863 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16864 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16865 return t;
16866 t = TREE_OPERAND (t, 0);
16867 break;
16869 default:
16870 return t;
16872 break;
16874 default:
16875 return t;
16879 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16880 This can only be applied to objects of a sizetype. */
16882 tree
16883 round_up_loc (location_t loc, tree value, int divisor)
16885 tree div = NULL_TREE;
16887 gcc_assert (divisor > 0);
16888 if (divisor == 1)
16889 return value;
16891 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16892 have to do anything. Only do this when we are not given a const,
16893 because in that case, this check is more expensive than just
16894 doing it. */
16895 if (TREE_CODE (value) != INTEGER_CST)
16897 div = build_int_cst (TREE_TYPE (value), divisor);
16899 if (multiple_of_p (TREE_TYPE (value), value, div))
16900 return value;
16903 /* If divisor is a power of two, simplify this to bit manipulation. */
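/* Illustrative example (not part of the original source): for
DIVISOR 8 this computes (value + 7) & ~7, so e.g. 13 rounds up
to (13 + 7) & ~7 == 16. */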
16904 if (divisor == (divisor & -divisor))
16906 if (TREE_CODE (value) == INTEGER_CST)
16908 double_int val = tree_to_double_int (value);
16909 bool overflow_p;
16911 if ((val.low & (divisor - 1)) == 0)
16912 return value;
16914 overflow_p = TREE_OVERFLOW (value);
16915 val.low &= ~(divisor - 1);
16916 val.low += divisor;
16917 if (val.low == 0)
16919 val.high++;
16920 if (val.high == 0)
16921 overflow_p = true;
16924 return force_fit_type_double (TREE_TYPE (value), val,
16925 -1, overflow_p);
16927 else
16929 tree t;
16931 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16932 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16933 t = build_int_cst (TREE_TYPE (value), -divisor);
16934 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16937 else
16939 if (!div)
16940 div = build_int_cst (TREE_TYPE (value), divisor);
16941 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16942 value = size_binop_loc (loc, MULT_EXPR, value, div);
16945 return value;
16948 /* Likewise, but round down. */
16950 tree
16951 round_down_loc (location_t loc, tree value, int divisor)
16953 tree div = NULL_TREE;
16955 gcc_assert (divisor > 0);
16956 if (divisor == 1)
16957 return value;
16959 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16960 have to do anything. Only do this when we are not given a const,
16961 because in that case, this check is more expensive than just
16962 doing it. */
16963 if (TREE_CODE (value) != INTEGER_CST)
16965 div = build_int_cst (TREE_TYPE (value), divisor);
16967 if (multiple_of_p (TREE_TYPE (value), value, div))
16968 return value;
16971 /* If divisor is a power of two, simplify this to bit manipulation. */
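/* Illustrative example (not part of the original source): for
DIVISOR 8 this computes value & ~7, so e.g. 13 rounds down to 8. */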
16972 if (divisor == (divisor & -divisor))
16974 tree t;
16976 t = build_int_cst (TREE_TYPE (value), -divisor);
16977 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16979 else
16981 if (!div)
16982 div = build_int_cst (TREE_TYPE (value), divisor);
16983 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16984 value = size_binop_loc (loc, MULT_EXPR, value, div);
16987 return value;
16990 /* Returns the pointer to the base of the object addressed by EXP and
16991 extracts the information about the offset of the access, storing it
16992 to PBITPOS and POFFSET. */
16994 static tree
16995 split_address_to_core_and_offset (tree exp,
16996 HOST_WIDE_INT *pbitpos, tree *poffset)
16998 tree core;
16999 enum machine_mode mode;
17000 int unsignedp, volatilep;
17001 HOST_WIDE_INT bitsize;
17002 location_t loc = EXPR_LOCATION (exp);
17004 if (TREE_CODE (exp) == ADDR_EXPR)
17006 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
17007 poffset, &mode, &unsignedp, &volatilep,
17008 false);
17009 core = build_fold_addr_expr_loc (loc, core);
17011 else
17013 core = exp;
17014 *pbitpos = 0;
17015 *poffset = NULL_TREE;
17018 return core;
17021 /* Returns true if addresses of E1 and E2 differ by a constant, false
17022 otherwise. If they do, E1 - E2 is stored in *DIFF. */
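/* Illustrative example (not part of the original source): given
int a[10] on a target with 4-byte int, E1 = &a[3] and E2 = &a[1]
differ by a constant and *DIFF is set to 8. */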
17024 bool
17025 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17027 tree core1, core2;
17028 HOST_WIDE_INT bitpos1, bitpos2;
17029 tree toffset1, toffset2, tdiff, type;
17031 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17032 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17034 if (bitpos1 % BITS_PER_UNIT != 0
17035 || bitpos2 % BITS_PER_UNIT != 0
17036 || !operand_equal_p (core1, core2, 0))
17037 return false;
17039 if (toffset1 && toffset2)
17041 type = TREE_TYPE (toffset1);
17042 if (type != TREE_TYPE (toffset2))
17043 toffset2 = fold_convert (type, toffset2);
17045 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17046 if (!cst_and_fits_in_hwi (tdiff))
17047 return false;
17049 *diff = int_cst_value (tdiff);
17051 else if (toffset1 || toffset2)
17053 /* If only one of the offsets is non-constant, the difference cannot
17054 be a constant. */
17055 return false;
17057 else
17058 *diff = 0;
17060 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17061 return true;
17064 /* Simplify the floating point expression EXP when the sign of the
17065 result is not significant. Return NULL_TREE if no simplification
17066 is possible. */
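/* Illustrative example (not part of the original source): when only
fabs of the result is needed, (-x) * y can be rewritten as x * y,
and sin (-x) (an "odd" function, see negate_mathfn_p) as sin (x). */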
17068 tree
17069 fold_strip_sign_ops (tree exp)
17071 tree arg0, arg1;
17072 location_t loc = EXPR_LOCATION (exp);
17074 switch (TREE_CODE (exp))
17076 case ABS_EXPR:
17077 case NEGATE_EXPR:
17078 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17079 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17081 case MULT_EXPR:
17082 case RDIV_EXPR:
17083 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17084 return NULL_TREE;
17085 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17086 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17087 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17088 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17089 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17090 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17091 break;
17093 case COMPOUND_EXPR:
17094 arg0 = TREE_OPERAND (exp, 0);
17095 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17096 if (arg1)
17097 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17098 break;
17100 case COND_EXPR:
17101 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17102 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17103 if (arg0 || arg1)
17104 return fold_build3_loc (loc,
17105 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17106 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17107 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17108 break;
17110 case CALL_EXPR:
17112 const enum built_in_function fcode = builtin_mathfn_code (exp);
17113 switch (fcode)
17115 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17116 /* Strip copysign function call, return the 1st argument. */
17117 arg0 = CALL_EXPR_ARG (exp, 0);
17118 arg1 = CALL_EXPR_ARG (exp, 1);
17119 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17121 default:
17122 /* Strip sign ops from the argument of "odd" math functions. */
17123 if (negate_mathfn_p (fcode))
17125 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17126 if (arg0)
17127 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17129 break;
17132 break;
17134 default:
17135 break;
17137 return NULL_TREE;