/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
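
/* Illustrative sketch (not part of the original sources): a typical
   caller builds sizetype arithmetic through these entry points, e.g.

     tree bytes = size_binop (MULT_EXPR, index, elt_size);
     tree total = size_binop (PLUS_EXPR, bytes, size_int (4));

   where INDEX and ELT_SIZE stand for existing sizetype trees; size_binop
   requires both operands to already have equivalent integer types
   (see int_binop_types_match_p below).  */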
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
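
/* For example (illustrative, but it follows directly from the values
   above): the low bits encode the elementary relations LT (1), EQ (2),
   GT (4) and UNORD (8), so the compound codes are simple bitwise ORs:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)         3 == 1 | 2
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)         5 == 1 | 4
     COMPCODE_NE   == (COMPCODE_LTGT | COMPCODE_UNORD)   13 == 5 | 8

   which is what lets an AND or OR of two comparisons be folded by
   ANDing or ORing their codes.  */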
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The signedness of the division follows operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
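
/* Illustratively (values not from the original sources): with ARG1 == 12
   and ARG2 == 4 this returns the INTEGER_CST 3, while ARG1 == 13 leaves
   a remainder of 1 and yields NULL_TREE.  */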
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
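
/* A hypothetical caller (illustrative only, not from this file) brackets
   folding that must stay quiet unless the result is actually used:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   so a strict-overflow warning is emitted only when folding succeeded,
   attributed to STMT's location.  */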
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
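
/* E.g. for a 32-bit signed type the single non-negatable value is
   0x80000000 (INT_MIN), whose negation is not representable; the
   comparison above tests for exactly that sign-bit-only pattern.  */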
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
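
/* Illustrative results: negate_expr_p is true for the INTEGER_CST 5,
   for -X, and for X - Y when neither signed zeros nor sign-dependent
   rounding are honored; it is false for INT_MIN in a signed type whose
   overflow is undefined, since -INT_MIN is not representable.  */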
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
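
/* A worked example (illustrative): for IN == (a + b) - 5 and
   CODE == PLUS_EXPR, the literal 5 is subtracted, so *LITP is left
   null, *MINUS_LITP becomes 5, *CONP stays null, and a + b is returned
   as the variable part; associate_trees below can then re-combine the
   pieces.  */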
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;

      /* Check for the case of INT_MIN % -1 and return
	 overflow and result = 0.  The TImode case is handled properly
	 in double-int.  */
      if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
	  && !uns
	  && op2.is_minus_one ()
	  && op1.high == (HOST_WIDE_INT) -1
	  && (HOST_WIDE_INT) op1.low
	     == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
	{
	  overflow = 1;
	  res = double_int_zero;
	}
      else
	tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
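
/* So, illustratively, int_const_binop (PLUS_EXPR, ...) applied to the
   INTEGER_CSTs 2 and 3 yields the constant 5, and TREE_OVERFLOW is set
   on the result whenever the signed arithmetic wrapped or either input
   already carried the flag.  */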
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions the compiler emits VEC_RSHIFT_EXPR always;
	     for !BYTES_BIG_ENDIAN it picks the first vector element,
	     but for BYTES_BIG_ENDIAN the last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
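
/* Illustratively, for the sizetype constants 3 and 5 the subtraction is
   done the other way around and then negated, yielding the ssizetype
   constant -2 without ever forming an unsigned wrap-around value.  */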
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
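
/* Illustrative results for a 32-bit signed target type: 3.7 truncates
   to 3; a NaN converts to 0 and 1.0e30 saturates to INT_MAX, the latter
   two with TREE_OVERFLOW set on the returned constant.  */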
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, round the value toward 0 by adding 1 to
     temp when the truncation discarded any nonzero fractional bits.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1946 /* Convert expression ARG to type TYPE. Used by the middle-end for
1947 simple conversions in preference to calling the front-end's convert. */
1949 tree
1950 fold_convert_loc (location_t loc, tree type, tree arg)
1952 tree orig = TREE_TYPE (arg);
1953 tree tem;
1955 if (type == orig)
1956 return arg;
1958 if (TREE_CODE (arg) == ERROR_MARK
1959 || TREE_CODE (type) == ERROR_MARK
1960 || TREE_CODE (orig) == ERROR_MARK)
1961 return error_mark_node;
1963 switch (TREE_CODE (type))
1965 case POINTER_TYPE:
1966 case REFERENCE_TYPE:
1967 /* Handle conversions between pointers to different address spaces. */
1968 if (POINTER_TYPE_P (orig)
1969 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1970 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1971 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1972 /* fall through */
1974 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1975 case OFFSET_TYPE:
1976 if (TREE_CODE (arg) == INTEGER_CST)
1978 tem = fold_convert_const (NOP_EXPR, type, arg);
1979 if (tem != NULL_TREE)
1980 return tem;
1982 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1983 || TREE_CODE (orig) == OFFSET_TYPE)
1984 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1985 if (TREE_CODE (orig) == COMPLEX_TYPE)
1986 return fold_convert_loc (loc, type,
1987 fold_build1_loc (loc, REALPART_EXPR,
1988 TREE_TYPE (orig), arg));
1989 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1990 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1991 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1993 case REAL_TYPE:
1994 if (TREE_CODE (arg) == INTEGER_CST)
1996 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1997 if (tem != NULL_TREE)
1998 return tem;
2000 else if (TREE_CODE (arg) == REAL_CST)
2002 tem = fold_convert_const (NOP_EXPR, type, arg);
2003 if (tem != NULL_TREE)
2004 return tem;
2006 else if (TREE_CODE (arg) == FIXED_CST)
2008 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2013 switch (TREE_CODE (orig))
2015 case INTEGER_TYPE:
2016 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2017 case POINTER_TYPE: case REFERENCE_TYPE:
2018 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2020 case REAL_TYPE:
2021 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2023 case FIXED_POINT_TYPE:
2024 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2026 case COMPLEX_TYPE:
2027 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2028 return fold_convert_loc (loc, type, tem);
2030 default:
2031 gcc_unreachable ();
2034 case FIXED_POINT_TYPE:
2035 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2036 || TREE_CODE (arg) == REAL_CST)
2038 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2039 if (tem != NULL_TREE)
2040 goto fold_convert_exit;
2043 switch (TREE_CODE (orig))
2045 case FIXED_POINT_TYPE:
2046 case INTEGER_TYPE:
2047 case ENUMERAL_TYPE:
2048 case BOOLEAN_TYPE:
2049 case REAL_TYPE:
2050 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2052 case COMPLEX_TYPE:
2053 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2054 return fold_convert_loc (loc, type, tem);
2056 default:
2057 gcc_unreachable ();
2060 case COMPLEX_TYPE:
2061 switch (TREE_CODE (orig))
2063 case INTEGER_TYPE:
2064 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2065 case POINTER_TYPE: case REFERENCE_TYPE:
2066 case REAL_TYPE:
2067 case FIXED_POINT_TYPE:
2068 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2069 fold_convert_loc (loc, TREE_TYPE (type), arg),
2070 fold_convert_loc (loc, TREE_TYPE (type),
2071 integer_zero_node));
2072 case COMPLEX_TYPE:
2074 tree rpart, ipart;
2076 if (TREE_CODE (arg) == COMPLEX_EXPR)
2078 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2079 TREE_OPERAND (arg, 0));
2080 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2081 TREE_OPERAND (arg, 1));
2082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2089 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2090 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2093 default:
2094 gcc_unreachable ();
2097 case VECTOR_TYPE:
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2105 case VOID_TYPE:
2106 tem = fold_ignored_result (arg);
2107 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2109 default:
2110 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2111 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2112 gcc_unreachable ();
2114 fold_convert_exit:
2115 protected_set_expr_location_unshare (tem, loc);
2116 return tem;
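/* Editorial sketch, not part of fold-const.c: the COMPLEX_TYPE case above
   converts a scalar by making it the real part and zeroing the imaginary
   part, i.e. COMPLEX_EXPR <(T) arg, (T) 0>.  Plain-C analogue (function
   name invented):  */
#include <assert.h>
#include <complex.h>

static void
scalar_to_complex_example (void)
{
  double _Complex z = 3;        /* built as COMPLEX_EXPR <3.0, 0.0> */
  assert (creal (z) == 3.0);
  assert (cimag (z) == 0.0);
}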
2119 /* Return false if expr can be assumed not to be an lvalue, true
2120 otherwise. */
2122 static bool
2123 maybe_lvalue_p (const_tree x)
2125 /* We only need to wrap lvalue tree codes. */
2126 switch (TREE_CODE (x))
2128 case VAR_DECL:
2129 case PARM_DECL:
2130 case RESULT_DECL:
2131 case LABEL_DECL:
2132 case FUNCTION_DECL:
2133 case SSA_NAME:
2135 case COMPONENT_REF:
2136 case MEM_REF:
2137 case INDIRECT_REF:
2138 case ARRAY_REF:
2139 case ARRAY_RANGE_REF:
2140 case BIT_FIELD_REF:
2141 case OBJ_TYPE_REF:
2143 case REALPART_EXPR:
2144 case IMAGPART_EXPR:
2145 case PREINCREMENT_EXPR:
2146 case PREDECREMENT_EXPR:
2147 case SAVE_EXPR:
2148 case TRY_CATCH_EXPR:
2149 case WITH_CLEANUP_EXPR:
2150 case COMPOUND_EXPR:
2151 case MODIFY_EXPR:
2152 case TARGET_EXPR:
2153 case COND_EXPR:
2154 case BIND_EXPR:
2155 break;
2157 default:
2158 /* Assume the worst for front-end tree codes. */
2159 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2160 break;
2161 return false;
2164 return true;
2167 /* Return an expr equal to X but certainly not valid as an lvalue. */
2169 tree
2170 non_lvalue_loc (location_t loc, tree x)
2172 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2173 us. */
2174 if (in_gimple_form)
2175 return x;
2177 if (! maybe_lvalue_p (x))
2178 return x;
2179 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2183 Zero means allow extended lvalues. */
2185 int pedantic_lvalues;
2187 /* When pedantic, return an expr equal to X but certainly not valid as a
2188 pedantic lvalue. Otherwise, return X. */
2190 static tree
2191 pedantic_non_lvalue_loc (location_t loc, tree x)
2193 if (pedantic_lvalues)
2194 return non_lvalue_loc (loc, x);
2196 return protected_set_expr_location_unshare (x, loc);
2199 /* Given a tree comparison code, return the code that is the logical inverse.
2200 It is generally not safe to do this for floating-point comparisons, except
2201 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2202 ERROR_MARK in this case. */
2204 enum tree_code
2205 invert_tree_comparison (enum tree_code code, bool honor_nans)
2207 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2208 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2209 return ERROR_MARK;
2211 switch (code)
2213 case EQ_EXPR:
2214 return NE_EXPR;
2215 case NE_EXPR:
2216 return EQ_EXPR;
2217 case GT_EXPR:
2218 return honor_nans ? UNLE_EXPR : LE_EXPR;
2219 case GE_EXPR:
2220 return honor_nans ? UNLT_EXPR : LT_EXPR;
2221 case LT_EXPR:
2222 return honor_nans ? UNGE_EXPR : GE_EXPR;
2223 case LE_EXPR:
2224 return honor_nans ? UNGT_EXPR : GT_EXPR;
2225 case LTGT_EXPR:
2226 return UNEQ_EXPR;
2227 case UNEQ_EXPR:
2228 return LTGT_EXPR;
2229 case UNGT_EXPR:
2230 return LE_EXPR;
2231 case UNGE_EXPR:
2232 return LT_EXPR;
2233 case UNLT_EXPR:
2234 return GE_EXPR;
2235 case UNLE_EXPR:
2236 return GT_EXPR;
2237 case ORDERED_EXPR:
2238 return UNORDERED_EXPR;
2239 case UNORDERED_EXPR:
2240 return ORDERED_EXPR;
2241 default:
2242 gcc_unreachable ();
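/* Editorial sketch, not part of fold-const.c: why inversion must switch to
   the UN* codes when NaNs are honored.  !(a < b) is not (a >= b) once a
   NaN may appear; it is UNGE, i.e. (a >= b) or unordered.  Function name
   invented for illustration.  */
#include <assert.h>
#include <math.h>

static void
invert_comparison_example (void)
{
  double a = nan (""), b = 1.0;
  assert (!(a < b));     /* NaN makes LT false...  */
  assert (!(a >= b));    /* ...and GE false too, so GE is not LT's inverse.  */
  assert (!(a < b) == (a >= b || isnan (a) || isnan (b)));   /* UNGE is.  */
}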
2246 /* Similar, but return the comparison that results if the operands are
2247 swapped. This is safe for floating-point. */
2249 enum tree_code
2250 swap_tree_comparison (enum tree_code code)
2252 switch (code)
2254 case EQ_EXPR:
2255 case NE_EXPR:
2256 case ORDERED_EXPR:
2257 case UNORDERED_EXPR:
2258 case LTGT_EXPR:
2259 case UNEQ_EXPR:
2260 return code;
2261 case GT_EXPR:
2262 return LT_EXPR;
2263 case GE_EXPR:
2264 return LE_EXPR;
2265 case LT_EXPR:
2266 return GT_EXPR;
2267 case LE_EXPR:
2268 return GE_EXPR;
2269 case UNGT_EXPR:
2270 return UNLT_EXPR;
2271 case UNGE_EXPR:
2272 return UNLE_EXPR;
2273 case UNLT_EXPR:
2274 return UNGT_EXPR;
2275 case UNLE_EXPR:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2283 /* Convert a comparison tree code from an enum tree_code representation
2284 into a compcode bit-based encoding. This function is the inverse of
2285 compcode_to_comparison. */
2287 static enum comparison_code
2288 comparison_to_compcode (enum tree_code code)
2290 switch (code)
2292 case LT_EXPR:
2293 return COMPCODE_LT;
2294 case EQ_EXPR:
2295 return COMPCODE_EQ;
2296 case LE_EXPR:
2297 return COMPCODE_LE;
2298 case GT_EXPR:
2299 return COMPCODE_GT;
2300 case NE_EXPR:
2301 return COMPCODE_NE;
2302 case GE_EXPR:
2303 return COMPCODE_GE;
2304 case ORDERED_EXPR:
2305 return COMPCODE_ORD;
2306 case UNORDERED_EXPR:
2307 return COMPCODE_UNORD;
2308 case UNLT_EXPR:
2309 return COMPCODE_UNLT;
2310 case UNEQ_EXPR:
2311 return COMPCODE_UNEQ;
2312 case UNLE_EXPR:
2313 return COMPCODE_UNLE;
2314 case UNGT_EXPR:
2315 return COMPCODE_UNGT;
2316 case LTGT_EXPR:
2317 return COMPCODE_LTGT;
2318 case UNGE_EXPR:
2319 return COMPCODE_UNGE;
2320 default:
2321 gcc_unreachable ();
2325 /* Convert a compcode bit-based encoding of a comparison operator back
2326 to GCC's enum tree_code representation. This function is the
2327 inverse of comparison_to_compcode. */
2329 static enum tree_code
2330 compcode_to_comparison (enum comparison_code code)
2332 switch (code)
2334 case COMPCODE_LT:
2335 return LT_EXPR;
2336 case COMPCODE_EQ:
2337 return EQ_EXPR;
2338 case COMPCODE_LE:
2339 return LE_EXPR;
2340 case COMPCODE_GT:
2341 return GT_EXPR;
2342 case COMPCODE_NE:
2343 return NE_EXPR;
2344 case COMPCODE_GE:
2345 return GE_EXPR;
2346 case COMPCODE_ORD:
2347 return ORDERED_EXPR;
2348 case COMPCODE_UNORD:
2349 return UNORDERED_EXPR;
2350 case COMPCODE_UNLT:
2351 return UNLT_EXPR;
2352 case COMPCODE_UNEQ:
2353 return UNEQ_EXPR;
2354 case COMPCODE_UNLE:
2355 return UNLE_EXPR;
2356 case COMPCODE_UNGT:
2357 return UNGT_EXPR;
2358 case COMPCODE_LTGT:
2359 return LTGT_EXPR;
2360 case COMPCODE_UNGE:
2361 return UNGE_EXPR;
2362 default:
2363 gcc_unreachable ();
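/* Editorial sketch, not part of fold-const.c: the encoding makes each
   compcode a 4-bit set over {LT, EQ, GT, UNORD}, so combining two
   comparisons of the same operands (see combine_comparisons below) is
   plain bit arithmetic on the codes.  Function name invented.  */
#include <assert.h>

static void
compcode_example (void)
{
  assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);    /* a<b || a==b  */
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);    /* a<=b && a>=b */
  assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT);  /* a<b || a>b   */
}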
2367 /* Return a tree for the comparison which is the combination of
2368 doing the AND or OR (depending on CODE) of the two operations LCODE
2369 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2370 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2371 if this makes the transformation invalid. */
2373 tree
2374 combine_comparisons (location_t loc,
2375 enum tree_code code, enum tree_code lcode,
2376 enum tree_code rcode, tree truth_type,
2377 tree ll_arg, tree lr_arg)
2379 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2380 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2381 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2382 int compcode;
2384 switch (code)
2386 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2387 compcode = lcompcode & rcompcode;
2388 break;
2390 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2391 compcode = lcompcode | rcompcode;
2392 break;
2394 default:
2395 return NULL_TREE;
2398 if (!honor_nans)
2400 /* Eliminate unordered comparisons, as well as LTGT and ORD
2401 which are not used unless the mode has NaNs. */
2402 compcode &= ~COMPCODE_UNORD;
2403 if (compcode == COMPCODE_LTGT)
2404 compcode = COMPCODE_NE;
2405 else if (compcode == COMPCODE_ORD)
2406 compcode = COMPCODE_TRUE;
2408 else if (flag_trapping_math)
2410 /* Check that the original operation and the optimized ones will trap
2411 under the same condition. */
2412 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2413 && (lcompcode != COMPCODE_EQ)
2414 && (lcompcode != COMPCODE_ORD);
2415 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2416 && (rcompcode != COMPCODE_EQ)
2417 && (rcompcode != COMPCODE_ORD);
2418 bool trap = (compcode & COMPCODE_UNORD) == 0
2419 && (compcode != COMPCODE_EQ)
2420 && (compcode != COMPCODE_ORD);
2422 /* In a short-circuited boolean expression the LHS might be
2423 such that the RHS, if evaluated, will never trap. For
2424 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2425 if neither x nor y is NaN. (This is a mixed blessing: for
2426 example, the expression above will never trap, hence
2427 optimizing it to x < y would be invalid). */
2428 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2429 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2430 rtrap = false;
2432 /* If the comparison was short-circuited, and only the RHS
2433 trapped, we may now generate a spurious trap. */
2434 if (rtrap && !ltrap
2435 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2436 return NULL_TREE;
2438 /* If we changed the conditions that cause a trap, we lose. */
2439 if ((ltrap || rtrap) != trap)
2440 return NULL_TREE;
2443 if (compcode == COMPCODE_TRUE)
2444 return constant_boolean_node (true, truth_type);
2445 else if (compcode == COMPCODE_FALSE)
2446 return constant_boolean_node (false, truth_type);
2447 else
2449 enum tree_code tcode;
2451 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2452 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2456 /* Return nonzero if two operands (typically of the same tree node)
2457 are necessarily equal. If either argument has side-effects this
2458 function returns zero. FLAGS modifies behavior as follows:
2460 If OEP_ONLY_CONST is set, only return nonzero for constants.
2461 This function tests whether the operands are indistinguishable;
2462 it does not test whether they are equal using C's == operation.
2463 The distinction is important for IEEE floating point, because
2464 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2465 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2467 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2468 even though it may hold multiple values during a function.
2469 This is because a GCC tree node guarantees that nothing else is
2470 executed between the evaluation of its "operands" (which may often
2471 be evaluated in arbitrary order). Hence if the operands themselves
2472 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2473 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2474 unset means assuming isochronic (or instantaneous) tree equivalence.
2475 Unless comparing arbitrary expression trees, such as from different
2476 statements, this flag can usually be left unset.
2478 If OEP_PURE_SAME is set, then pure functions with identical arguments
2479 are considered the same. It is used when the caller has other ways
2480 to ensure that global memory is unchanged in between. */
2482 int
2483 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2485 /* If either is ERROR_MARK, they aren't equal. */
2486 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2487 || TREE_TYPE (arg0) == error_mark_node
2488 || TREE_TYPE (arg1) == error_mark_node)
2489 return 0;
2491 /* Similar, if either does not have a type (like a released SSA name),
2492 they aren't equal. */
2493 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2494 return 0;
2496 /* Check equality of integer constants before bailing out due to
2497 precision differences. */
2498 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2499 return tree_int_cst_equal (arg0, arg1);
2501 /* If both types don't have the same signedness, then we can't consider
2502 them equal. We must check this before the STRIP_NOPS calls
2503 because they may change the signedness of the arguments. As pointers
2504 strictly don't have a signedness, require either two pointers or
2505 two non-pointers as well. */
2506 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2507 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2508 return 0;
2510 /* We cannot consider pointers to different address spaces equal. */
2511 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2512 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2513 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2514 return 0;
2516 /* If both types don't have the same precision, then it is not safe
2517 to strip NOPs. */
2518 if (element_precision (TREE_TYPE (arg0))
2519 != element_precision (TREE_TYPE (arg1)))
2520 return 0;
2522 STRIP_NOPS (arg0);
2523 STRIP_NOPS (arg1);
2525 /* In case both args are comparisons but with different comparison
2526 code, try to swap the comparison operands of one arg to produce
2527 a match and compare that variant. */
2528 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2529 && COMPARISON_CLASS_P (arg0)
2530 && COMPARISON_CLASS_P (arg1))
2532 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2534 if (TREE_CODE (arg0) == swap_code)
2535 return operand_equal_p (TREE_OPERAND (arg0, 0),
2536 TREE_OPERAND (arg1, 1), flags)
2537 && operand_equal_p (TREE_OPERAND (arg0, 1),
2538 TREE_OPERAND (arg1, 0), flags);
2541 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2542 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2543 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2544 return 0;
2546 /* This is needed for conversions and for COMPONENT_REF.
2547 Might as well play it safe and always test this. */
2548 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2549 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2550 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2551 return 0;
2553 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2554 We don't care about side effects in that case because the SAVE_EXPR
2555 takes care of that for us. In all other cases, two expressions are
2556 equal if they have no side effects. If we have two identical
2557 expressions with side effects that should be treated the same due
2558 to the only side effects being identical SAVE_EXPR's, that will
2559 be detected in the recursive calls below.
2560 If we are taking an invariant address of two identical objects
2561 they are necessarily equal as well. */
2562 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2563 && (TREE_CODE (arg0) == SAVE_EXPR
2564 || (flags & OEP_CONSTANT_ADDRESS_OF)
2565 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2566 return 1;
2568 /* Next handle constant cases, those for which we can return 1 even
2569 if ONLY_CONST is set. */
2570 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2571 switch (TREE_CODE (arg0))
2573 case INTEGER_CST:
2574 return tree_int_cst_equal (arg0, arg1);
2576 case FIXED_CST:
2577 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2578 TREE_FIXED_CST (arg1));
2580 case REAL_CST:
2581 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2582 TREE_REAL_CST (arg1)))
2583 return 1;
2586 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2588 /* If we do not distinguish between signed and unsigned zero,
2589 consider them equal. */
2590 if (real_zerop (arg0) && real_zerop (arg1))
2591 return 1;
2593 return 0;
2595 case VECTOR_CST:
2597 unsigned i;
2599 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2600 return 0;
2602 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2604 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2605 VECTOR_CST_ELT (arg1, i), flags))
2606 return 0;
2608 return 1;
2611 case COMPLEX_CST:
2612 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2613 flags)
2614 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2615 flags));
2617 case STRING_CST:
2618 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2619 && ! memcmp (TREE_STRING_POINTER (arg0),
2620 TREE_STRING_POINTER (arg1),
2621 TREE_STRING_LENGTH (arg0)));
2623 case ADDR_EXPR:
2624 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2625 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2626 ? OEP_CONSTANT_ADDRESS_OF : 0);
2627 default:
2628 break;
2631 if (flags & OEP_ONLY_CONST)
2632 return 0;
2634 /* Define macros to test an operand from arg0 and arg1 for equality and a
2635 variant that allows null and views null as being different from any
2636 non-null value. In the latter case, if either is null, then both
2637 must be; otherwise, do the normal comparison. */
2638 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2639 TREE_OPERAND (arg1, N), flags)
2641 #define OP_SAME_WITH_NULL(N) \
2642 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2643 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2645 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2647 case tcc_unary:
2648 /* Two conversions are equal only if signedness and modes match. */
2649 switch (TREE_CODE (arg0))
2651 CASE_CONVERT:
2652 case FIX_TRUNC_EXPR:
2653 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2654 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2655 return 0;
2656 break;
2657 default:
2658 break;
2661 return OP_SAME (0);
2664 case tcc_comparison:
2665 case tcc_binary:
2666 if (OP_SAME (0) && OP_SAME (1))
2667 return 1;
2669 /* For commutative ops, allow the other order. */
2670 return (commutative_tree_code (TREE_CODE (arg0))
2671 && operand_equal_p (TREE_OPERAND (arg0, 0),
2672 TREE_OPERAND (arg1, 1), flags)
2673 && operand_equal_p (TREE_OPERAND (arg0, 1),
2674 TREE_OPERAND (arg1, 0), flags));
2676 case tcc_reference:
2677 /* If either of the pointer (or reference) expressions we are
2678 dereferencing contains a side effect, these cannot be equal,
2679 but their addresses can be. */
2680 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2681 && (TREE_SIDE_EFFECTS (arg0)
2682 || TREE_SIDE_EFFECTS (arg1)))
2683 return 0;
2685 switch (TREE_CODE (arg0))
2687 case INDIRECT_REF:
2688 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2689 return OP_SAME (0);
2691 case REALPART_EXPR:
2692 case IMAGPART_EXPR:
2693 return OP_SAME (0);
2695 case TARGET_MEM_REF:
2696 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2697 /* Require equal extra operands and then fall through to MEM_REF
2698 handling of the two common operands. */
2699 if (!OP_SAME_WITH_NULL (2)
2700 || !OP_SAME_WITH_NULL (3)
2701 || !OP_SAME_WITH_NULL (4))
2702 return 0;
2703 /* Fallthru. */
2704 case MEM_REF:
2705 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 /* Require equal access sizes, and similar pointer types.
2707 We can have incomplete types for array references of
2708 variable-sized arrays from the Fortran frontend
2709 though. Also verify the types are compatible. */
2710 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2711 || (TYPE_SIZE (TREE_TYPE (arg0))
2712 && TYPE_SIZE (TREE_TYPE (arg1))
2713 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2714 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2715 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2716 && alias_ptr_types_compatible_p
2717 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2718 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2719 && OP_SAME (0) && OP_SAME (1));
2721 case ARRAY_REF:
2722 case ARRAY_RANGE_REF:
2723 /* Operands 2 and 3 may be null.
2724 Compare the array index by value if it is constant first as we
2725 may have different types but same value here. */
2726 if (!OP_SAME (0))
2727 return 0;
2728 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2729 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2730 TREE_OPERAND (arg1, 1))
2731 || OP_SAME (1))
2732 && OP_SAME_WITH_NULL (2)
2733 && OP_SAME_WITH_NULL (3));
2735 case COMPONENT_REF:
2736 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2737 may be NULL when we're called to compare MEM_EXPRs. */
2738 if (!OP_SAME_WITH_NULL (0)
2739 || !OP_SAME (1))
2740 return 0;
2741 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2742 return OP_SAME_WITH_NULL (2);
2744 case BIT_FIELD_REF:
2745 if (!OP_SAME (0))
2746 return 0;
2747 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2748 return OP_SAME (1) && OP_SAME (2);
2750 default:
2751 return 0;
2754 case tcc_expression:
2755 switch (TREE_CODE (arg0))
2757 case ADDR_EXPR:
2758 case TRUTH_NOT_EXPR:
2759 return OP_SAME (0);
2761 case TRUTH_ANDIF_EXPR:
2762 case TRUTH_ORIF_EXPR:
2763 return OP_SAME (0) && OP_SAME (1);
2765 case FMA_EXPR:
2766 case WIDEN_MULT_PLUS_EXPR:
2767 case WIDEN_MULT_MINUS_EXPR:
2768 if (!OP_SAME (2))
2769 return 0;
2770 /* The multiplication operands are commutative. */
2771 /* FALLTHRU */
2773 case TRUTH_AND_EXPR:
2774 case TRUTH_OR_EXPR:
2775 case TRUTH_XOR_EXPR:
2776 if (OP_SAME (0) && OP_SAME (1))
2777 return 1;
2779 /* Otherwise take into account this is a commutative operation. */
2780 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2781 TREE_OPERAND (arg1, 1), flags)
2782 && operand_equal_p (TREE_OPERAND (arg0, 1),
2783 TREE_OPERAND (arg1, 0), flags));
2785 case COND_EXPR:
2786 case VEC_COND_EXPR:
2787 case DOT_PROD_EXPR:
2788 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2790 default:
2791 return 0;
2794 case tcc_vl_exp:
2795 switch (TREE_CODE (arg0))
2797 case CALL_EXPR:
2798 /* If the CALL_EXPRs call different functions, then they
2799 clearly cannot be equal. */
2800 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2801 flags))
2802 return 0;
2805 unsigned int cef = call_expr_flags (arg0);
2806 if (flags & OEP_PURE_SAME)
2807 cef &= ECF_CONST | ECF_PURE;
2808 else
2809 cef &= ECF_CONST;
2810 if (!cef)
2811 return 0;
2814 /* Now see if all the arguments are the same. */
2816 const_call_expr_arg_iterator iter0, iter1;
2817 const_tree a0, a1;
2818 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2819 a1 = first_const_call_expr_arg (arg1, &iter1);
2820 a0 && a1;
2821 a0 = next_const_call_expr_arg (&iter0),
2822 a1 = next_const_call_expr_arg (&iter1))
2823 if (! operand_equal_p (a0, a1, flags))
2824 return 0;
2826 /* If we get here and both argument lists are exhausted
2827 then the CALL_EXPRs are equal. */
2828 return ! (a0 || a1);
2830 default:
2831 return 0;
2834 case tcc_declaration:
2835 /* Consider __builtin_sqrt equal to sqrt. */
2836 return (TREE_CODE (arg0) == FUNCTION_DECL
2837 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2838 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2839 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2841 default:
2842 return 0;
2845 #undef OP_SAME
2846 #undef OP_SAME_WITH_NULL
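/* Editorial sketch, not part of fold-const.c: the IEEE subtlety from the
   header comment of operand_equal_p, in plain C.  -0.0 and 0.0 compare
   equal yet are distinguishable; a NaN is bit-identical to its copy yet
   compares unequal (bit preservation assumed, as on common targets).  */
#include <assert.h>
#include <math.h>
#include <string.h>

static void
ieee_identity_example (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                        /* equal under ==...  */
  assert (signbit (nz) && !signbit (pz));   /* ...but distinguishable.  */

  double q = nan (""), q2 = q;
  assert (q != q2);                         /* NaN != NaN...  */
  assert (memcmp (&q, &q2, sizeof q) == 0); /* ...yet the same bits.  */
}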
2849 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2850 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2852 When in doubt, return 0. */
2854 static int
2855 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2857 int unsignedp1, unsignedpo;
2858 tree primarg0, primarg1, primother;
2859 unsigned int correct_width;
2861 if (operand_equal_p (arg0, arg1, 0))
2862 return 1;
2864 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2865 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2866 return 0;
2868 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2869 and see if the inner values are the same. This removes any
2870 signedness comparison, which doesn't matter here. */
2871 primarg0 = arg0, primarg1 = arg1;
2872 STRIP_NOPS (primarg0);
2873 STRIP_NOPS (primarg1);
2874 if (operand_equal_p (primarg0, primarg1, 0))
2875 return 1;
2877 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2878 actual comparison operand, ARG0.
2880 First throw away any conversions to wider types
2881 already present in the operands. */
2883 primarg1 = get_narrower (arg1, &unsignedp1);
2884 primother = get_narrower (other, &unsignedpo);
2886 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2887 if (unsignedp1 == unsignedpo
2888 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2889 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2891 tree type = TREE_TYPE (arg0);
2893 /* Make sure shorter operand is extended the right way
2894 to match the longer operand. */
2895 primarg1 = fold_convert (signed_or_unsigned_type_for
2896 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2898 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2899 return 1;
2902 return 0;
2905 /* See if ARG is an expression that is either a comparison or is performing
2906 arithmetic on comparisons. The comparisons must only be comparing
2907 two different values, which will be stored in *CVAL1 and *CVAL2; if
2908 they are nonzero it means that some operands have already been found.
2909 No variables may be used anywhere else in the expression except in the
2910 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2911 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2913 If this is true, return 1. Otherwise, return zero. */
2915 static int
2916 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2918 enum tree_code code = TREE_CODE (arg);
2919 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2921 /* We can handle some of the tcc_expression cases here. */
2922 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2923 tclass = tcc_unary;
2924 else if (tclass == tcc_expression
2925 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2926 || code == COMPOUND_EXPR))
2927 tclass = tcc_binary;
2929 else if (tclass == tcc_expression && code == SAVE_EXPR
2930 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2932 /* If we've already found a CVAL1 or CVAL2, this expression is
2933 too complex to handle. */
2934 if (*cval1 || *cval2)
2935 return 0;
2937 tclass = tcc_unary;
2938 *save_p = 1;
2941 switch (tclass)
2943 case tcc_unary:
2944 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2946 case tcc_binary:
2947 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2948 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2949 cval1, cval2, save_p));
2951 case tcc_constant:
2952 return 1;
2954 case tcc_expression:
2955 if (code == COND_EXPR)
2956 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2957 cval1, cval2, save_p)
2958 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2959 cval1, cval2, save_p)
2960 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2961 cval1, cval2, save_p));
2962 return 0;
2964 case tcc_comparison:
2965 /* First see if we can handle the first operand, then the second. For
2966 the second operand, we know *CVAL1 can't be zero. It must be that
2967 one side of the comparison is each of the values; test for the
2968 case where this isn't true by failing if the two operands
2969 are the same. */
2971 if (operand_equal_p (TREE_OPERAND (arg, 0),
2972 TREE_OPERAND (arg, 1), 0))
2973 return 0;
2975 if (*cval1 == 0)
2976 *cval1 = TREE_OPERAND (arg, 0);
2977 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2979 else if (*cval2 == 0)
2980 *cval2 = TREE_OPERAND (arg, 0);
2981 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2983 else
2984 return 0;
2986 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2988 else if (*cval2 == 0)
2989 *cval2 = TREE_OPERAND (arg, 1);
2990 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2992 else
2993 return 0;
2995 return 1;
2997 default:
2998 return 0;
3002 /* ARG is a tree that is known to contain just arithmetic operations and
3003 comparisons. Evaluate the operations in the tree substituting NEW0 for
3004 any occurrence of OLD0 as an operand of a comparison and likewise for
3005 NEW1 and OLD1. */
3007 static tree
3008 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3009 tree old1, tree new1)
3011 tree type = TREE_TYPE (arg);
3012 enum tree_code code = TREE_CODE (arg);
3013 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3015 /* We can handle some of the tcc_expression cases here. */
3016 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3017 tclass = tcc_unary;
3018 else if (tclass == tcc_expression
3019 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3020 tclass = tcc_binary;
3022 switch (tclass)
3024 case tcc_unary:
3025 return fold_build1_loc (loc, code, type,
3026 eval_subst (loc, TREE_OPERAND (arg, 0),
3027 old0, new0, old1, new1));
3029 case tcc_binary:
3030 return fold_build2_loc (loc, code, type,
3031 eval_subst (loc, TREE_OPERAND (arg, 0),
3032 old0, new0, old1, new1),
3033 eval_subst (loc, TREE_OPERAND (arg, 1),
3034 old0, new0, old1, new1));
3036 case tcc_expression:
3037 switch (code)
3039 case SAVE_EXPR:
3040 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3041 old1, new1);
3043 case COMPOUND_EXPR:
3044 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3045 old1, new1);
3047 case COND_EXPR:
3048 return fold_build3_loc (loc, code, type,
3049 eval_subst (loc, TREE_OPERAND (arg, 0),
3050 old0, new0, old1, new1),
3051 eval_subst (loc, TREE_OPERAND (arg, 1),
3052 old0, new0, old1, new1),
3053 eval_subst (loc, TREE_OPERAND (arg, 2),
3054 old0, new0, old1, new1));
3055 default:
3056 break;
3058 /* Fall through - ??? */
3060 case tcc_comparison:
3062 tree arg0 = TREE_OPERAND (arg, 0);
3063 tree arg1 = TREE_OPERAND (arg, 1);
3065 /* We need to check both for exact equality and tree equality. The
3066 former will be true if the operand has a side-effect. In that
3067 case, we know the operand occurred exactly once. */
3069 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3070 arg0 = new0;
3071 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3072 arg0 = new1;
3074 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3075 arg1 = new0;
3076 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3077 arg1 = new1;
3079 return fold_build2_loc (loc, code, type, arg0, arg1);
3082 default:
3083 return arg;
3087 /* Return a tree for the case when the result of an expression is RESULT
3088 converted to TYPE and OMITTED was previously an operand of the expression
3089 but is now not needed (e.g., we folded OMITTED * 0).
3091 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3092 the conversion of RESULT to TYPE. */
3094 tree
3095 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3097 tree t = fold_convert_loc (loc, type, result);
3099 /* If the resulting operand is an empty statement, just return the omitted
3100 statement cast to void. */
3101 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3102 return build1_loc (loc, NOP_EXPR, void_type_node,
3103 fold_ignored_result (omitted));
3105 if (TREE_SIDE_EFFECTS (omitted))
3106 return build2_loc (loc, COMPOUND_EXPR, type,
3107 fold_ignored_result (omitted), t);
3109 return non_lvalue_loc (loc, t);
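/* Editorial sketch, not part of fold-const.c: at the source level this is
   what lets f () * 0 fold to (f (), 0) -- the result is known, but a
   side-effecting operand must still be evaluated.  Names invented.  */
#include <assert.h>

static int omit_calls;
static int omit_f (void) { return ++omit_calls; }

static void
omit_operand_example (void)
{
  int r = (omit_f (), 0);       /* the folded form of omit_f () * 0 */
  assert (r == 0);
  assert (omit_calls == 1);     /* the call was not dropped */
}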
3112 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3114 static tree
3115 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3116 tree omitted)
3118 tree t = fold_convert_loc (loc, type, result);
3120 /* If the resulting operand is an empty statement, just return the omitted
3121 statement cast to void. */
3122 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3123 return build1_loc (loc, NOP_EXPR, void_type_node,
3124 fold_ignored_result (omitted));
3126 if (TREE_SIDE_EFFECTS (omitted))
3127 return build2_loc (loc, COMPOUND_EXPR, type,
3128 fold_ignored_result (omitted), t);
3130 return pedantic_non_lvalue_loc (loc, t);
3133 /* Return a tree for the case when the result of an expression is RESULT
3134 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3135 of the expression but are now not needed.
3137 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3138 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3139 evaluated before OMITTED2. Otherwise, if neither has side effects,
3140 just do the conversion of RESULT to TYPE. */
3142 tree
3143 omit_two_operands_loc (location_t loc, tree type, tree result,
3144 tree omitted1, tree omitted2)
3146 tree t = fold_convert_loc (loc, type, result);
3148 if (TREE_SIDE_EFFECTS (omitted2))
3149 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3150 if (TREE_SIDE_EFFECTS (omitted1))
3151 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3153 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3157 /* Return a simplified tree node for the truth-negation of ARG. This
3158 never alters ARG itself. We assume that ARG is an operation that
3159 returns a truth value (0 or 1).
3161 FIXME: one would think we would fold the result, but it causes
3162 problems with the dominator optimizer. */
3164 static tree
3165 fold_truth_not_expr (location_t loc, tree arg)
3167 tree type = TREE_TYPE (arg);
3168 enum tree_code code = TREE_CODE (arg);
3169 location_t loc1, loc2;
3171 /* If this is a comparison, we can simply invert it, except for
3172 floating-point non-equality comparisons, in which case we just
3173 enclose a TRUTH_NOT_EXPR around what we have. */
3175 if (TREE_CODE_CLASS (code) == tcc_comparison)
3177 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3178 if (FLOAT_TYPE_P (op_type)
3179 && flag_trapping_math
3180 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3181 && code != NE_EXPR && code != EQ_EXPR)
3182 return NULL_TREE;
3184 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3185 if (code == ERROR_MARK)
3186 return NULL_TREE;
3188 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3189 TREE_OPERAND (arg, 1));
3192 switch (code)
3194 case INTEGER_CST:
3195 return constant_boolean_node (integer_zerop (arg), type);
3197 case TRUTH_AND_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_OR_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204 case TRUTH_OR_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_AND_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211 case TRUTH_XOR_EXPR:
3212 /* Here we can invert either operand. We invert the first operand
3213 unless the second operand is a TRUTH_NOT_EXPR in which case our
3214 result is the XOR of the first operand with the inside of the
3215 negation of the second operand. */
3217 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3218 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3219 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3220 else
3221 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3222 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3223 TREE_OPERAND (arg, 1));
3225 case TRUTH_ANDIF_EXPR:
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3227 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3228 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3230 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3232 case TRUTH_ORIF_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3234 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3235 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3237 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3239 case TRUTH_NOT_EXPR:
3240 return TREE_OPERAND (arg, 0);
3242 case COND_EXPR:
3244 tree arg1 = TREE_OPERAND (arg, 1);
3245 tree arg2 = TREE_OPERAND (arg, 2);
3247 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3248 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3250 /* A COND_EXPR may have a throw as one operand, which
3251 then has void type. Just leave void operands
3252 as they are. */
3253 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3254 VOID_TYPE_P (TREE_TYPE (arg1))
3255 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3256 VOID_TYPE_P (TREE_TYPE (arg2))
3257 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3260 case COMPOUND_EXPR:
3261 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3262 return build2_loc (loc, COMPOUND_EXPR, type,
3263 TREE_OPERAND (arg, 0),
3264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3266 case NON_LVALUE_EXPR:
3267 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3268 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3270 CASE_CONVERT:
3271 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3272 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3274 /* ... fall through ... */
3276 case FLOAT_EXPR:
3277 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3278 return build1_loc (loc, TREE_CODE (arg), type,
3279 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3281 case BIT_AND_EXPR:
3282 if (!integer_onep (TREE_OPERAND (arg, 1)))
3283 return NULL_TREE;
3284 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3286 case SAVE_EXPR:
3287 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3289 case CLEANUP_POINT_EXPR:
3290 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3291 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3292 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3294 default:
3295 return NULL_TREE;
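/* Editorial sketch, not part of fold-const.c: the TRUTH_AND_EXPR and
   TRUTH_OR_EXPR cases above are De Morgan's laws.  In plain C, for any
   truth values a and b (function name invented):  */
#include <assert.h>

static void
de_morgan_example (int a, int b)
{
  assert (!(a && b) == (!a || !b));
  assert (!(a || b) == (!a && !b));
}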
3299 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3300 assume that ARG is an operation that returns a truth value (0 or 1
3301 for scalars, 0 or -1 for vectors). Return the folded expression if
3302 folding is successful. Otherwise, return NULL_TREE. */
3304 static tree
3305 fold_invert_truthvalue (location_t loc, tree arg)
3307 tree type = TREE_TYPE (arg);
3308 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3309 ? BIT_NOT_EXPR
3310 : TRUTH_NOT_EXPR,
3311 type, arg);
3314 /* Return a simplified tree node for the truth-negation of ARG. This
3315 never alters ARG itself. We assume that ARG is an operation that
3316 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3318 tree
3319 invert_truthvalue_loc (location_t loc, tree arg)
3321 if (TREE_CODE (arg) == ERROR_MARK)
3322 return arg;
3324 tree type = TREE_TYPE (arg);
3325 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3326 ? BIT_NOT_EXPR
3327 : TRUTH_NOT_EXPR,
3328 type, arg);
3331 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3332 operands are another bit-wise operation with a common input. If so,
3333 distribute the bit operations to save an operation and possibly two if
3334 constants are involved. For example, convert
3335 (A | B) & (A | C) into A | (B & C)
3336 Further simplification will occur if B and C are constants.
3338 If this optimization cannot be done, 0 will be returned. */
3340 static tree
3341 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3342 tree arg0, tree arg1)
3344 tree common;
3345 tree left, right;
3347 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3348 || TREE_CODE (arg0) == code
3349 || (TREE_CODE (arg0) != BIT_AND_EXPR
3350 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3351 return 0;
3353 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3355 common = TREE_OPERAND (arg0, 0);
3356 left = TREE_OPERAND (arg0, 1);
3357 right = TREE_OPERAND (arg1, 1);
3359 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3361 common = TREE_OPERAND (arg0, 0);
3362 left = TREE_OPERAND (arg0, 1);
3363 right = TREE_OPERAND (arg1, 0);
3365 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3367 common = TREE_OPERAND (arg0, 1);
3368 left = TREE_OPERAND (arg0, 0);
3369 right = TREE_OPERAND (arg1, 1);
3371 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3373 common = TREE_OPERAND (arg0, 1);
3374 left = TREE_OPERAND (arg0, 0);
3375 right = TREE_OPERAND (arg1, 0);
3377 else
3378 return 0;
3380 common = fold_convert_loc (loc, type, common);
3381 left = fold_convert_loc (loc, type, left);
3382 right = fold_convert_loc (loc, type, right);
3383 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3384 fold_build2_loc (loc, code, type, left, right));
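/* Editorial sketch, not part of fold-const.c: the distribution law above
   checked on concrete bit patterns -- three bit operations become two.
   Function name invented for illustration.  */
#include <assert.h>

static void
distribute_bits_example (void)
{
  unsigned a = 0x0f, b = 0x33, c = 0x55;
  assert (((a | b) & (a | c)) == (a | (b & c)));
  assert (((a & b) | (a & c)) == (a & (b | c)));   /* the dual form */
}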
3387 /* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
3388 simplify a binary operation with code CODE. This optimization is unsafe. */
3389 static tree
3390 distribute_real_division (location_t loc, enum tree_code code, tree type,
3391 tree arg0, tree arg1)
3393 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3394 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3396 /* (A / C) +- (B / C) -> (A +- B) / C. */
3397 if (mul0 == mul1
3398 && operand_equal_p (TREE_OPERAND (arg0, 1),
3399 TREE_OPERAND (arg1, 1), 0))
3400 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3401 fold_build2_loc (loc, code, type,
3402 TREE_OPERAND (arg0, 0),
3403 TREE_OPERAND (arg1, 0)),
3404 TREE_OPERAND (arg0, 1));
3406 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3407 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3408 TREE_OPERAND (arg1, 0), 0)
3409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3410 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3412 REAL_VALUE_TYPE r0, r1;
3413 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3414 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3415 if (!mul0)
3416 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3417 if (!mul1)
3418 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3419 real_arithmetic (&r0, code, &r0, &r1);
3420 return fold_build2_loc (loc, MULT_EXPR, type,
3421 TREE_OPERAND (arg0, 0),
3422 build_real (type, r0));
3425 return NULL_TREE;
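/* Editorial sketch, not part of fold-const.c: why the comment above calls
   this unsafe -- each floating-point division rounds separately, so
   A/C + B/C and (A + B)/C need not agree to the last bit, which is why
   callers only apply it under unsafe-math optimizations.  Name invented.  */
static int
real_division_can_differ (double a, double b, double c)
{
  return (a / c + b / c) != (a + b) / c;   /* nonzero for some inputs */
}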
3428 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3429 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3431 static tree
3432 make_bit_field_ref (location_t loc, tree inner, tree type,
3433 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3435 tree result, bftype;
3437 if (bitpos == 0)
3439 tree size = TYPE_SIZE (TREE_TYPE (inner));
3440 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3441 || POINTER_TYPE_P (TREE_TYPE (inner)))
3442 && tree_fits_shwi_p (size)
3443 && tree_to_shwi (size) == bitsize)
3444 return fold_convert_loc (loc, type, inner);
3447 bftype = type;
3448 if (TYPE_PRECISION (bftype) != bitsize
3449 || TYPE_UNSIGNED (bftype) == !unsignedp)
3450 bftype = build_nonstandard_integer_type (bitsize, 0);
3452 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3453 size_int (bitsize), bitsize_int (bitpos));
3455 if (bftype != type)
3456 result = fold_convert_loc (loc, type, result);
3458 return result;
3461 /* Optimize a bit-field compare.
3463 There are two cases: First is a compare against a constant and the
3464 second is a comparison of two items where the fields are at the same
3465 bit position relative to the start of a chunk (byte, halfword, word)
3466 large enough to contain it. In these cases we can avoid the shift
3467 implicit in bitfield extractions.
3469 For constants, we emit a compare of the shifted constant with the
3470 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471 compared. For two fields at the same position, we do the ANDs with the
3472 similar mask and compare the result of the ANDs.
3474 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476 are the left and right operands of the comparison, respectively.
3478 If the optimization described above can be done, we return the resulting
3479 tree. Otherwise we return zero. */
3481 static tree
3482 optimize_bit_field_compare (location_t loc, enum tree_code code,
3483 tree compare_type, tree lhs, tree rhs)
3485 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486 tree type = TREE_TYPE (lhs);
3487 tree signed_type, unsigned_type;
3488 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489 enum machine_mode lmode, rmode, nmode;
3490 int lunsignedp, runsignedp;
3491 int lvolatilep = 0, rvolatilep = 0;
3492 tree linner, rinner = NULL_TREE;
3493 tree mask;
3494 tree offset;
3496 /* Get all the information about the extractions being done. If the bit size
3497 is the same as the size of the underlying object, we aren't doing an
3498 extraction at all and so can do nothing. We also don't want to
3499 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500 then will no longer be able to replace it. */
3501 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 &lunsignedp, &lvolatilep, false);
3503 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3505 return 0;
3507 if (!const_p)
3509 /* If this is not a constant, we can only do something if bit positions,
3510 sizes, and signedness are the same. */
3511 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 &runsignedp, &rvolatilep, false);
3514 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 || lunsignedp != runsignedp || offset != 0
3516 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3517 return 0;
3520 /* See if we can find a mode to refer to this field. We should be able to,
3521 but fail if we can't. */
3522 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3523 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 TYPE_ALIGN (TREE_TYPE (rinner))),
3526 word_mode, false);
3527 if (nmode == VOIDmode)
3528 return 0;
3530 /* Set signed and unsigned types of the precision of this mode for the
3531 shifts below. */
3532 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3535 /* Compute the bit position and size for the new reference and our offset
3536 within it. If the new reference is the same size as the original, we
3537 won't optimize anything, so return zero. */
3538 nbitsize = GET_MODE_BITSIZE (nmode);
3539 nbitpos = lbitpos & ~ (nbitsize - 1);
3540 lbitpos -= nbitpos;
3541 if (nbitsize == lbitsize)
3542 return 0;
3544 if (BYTES_BIG_ENDIAN)
3545 lbitpos = nbitsize - lbitsize - lbitpos;
3547 /* Make the mask to be used against the extracted field. */
3548 mask = build_int_cst_type (unsigned_type, -1);
3549 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3550 mask = const_binop (RSHIFT_EXPR, mask,
3551 size_int (nbitsize - lbitsize - lbitpos));
3553 if (! const_p)
3554 /* If not comparing with constant, just rework the comparison
3555 and return. */
3556 return fold_build2_loc (loc, code, compare_type,
3557 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3558 make_bit_field_ref (loc, linner,
3559 unsigned_type,
3560 nbitsize, nbitpos,
3561 1),
3562 mask),
3563 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3564 make_bit_field_ref (loc, rinner,
3565 unsigned_type,
3566 nbitsize, nbitpos,
3567 1),
3568 mask));
3570 /* Otherwise, we are handling the constant case. See if the constant is too
3571 big for the field. Warn and return a tree for 0 (false) if so. We do
3572 this not only for its own sake, but to avoid having to test for this
3573 error case below. If we didn't, we might generate wrong code.
3575 For unsigned fields, the constant shifted right by the field length should
3576 be all zero. For signed fields, the high-order bits should agree with
3577 the sign bit. */
3579 if (lunsignedp)
3581 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3582 fold_convert_loc (loc,
3583 unsigned_type, rhs),
3584 size_int (lbitsize))))
3586 warning (0, "comparison is always %d due to width of bit-field",
3587 code == NE_EXPR);
3588 return constant_boolean_node (code == NE_EXPR, compare_type);
3591 else
3593 tree tem = const_binop (RSHIFT_EXPR,
3594 fold_convert_loc (loc, signed_type, rhs),
3595 size_int (lbitsize - 1));
3596 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3598 warning (0, "comparison is always %d due to width of bit-field",
3599 code == NE_EXPR);
3600 return constant_boolean_node (code == NE_EXPR, compare_type);
3604 /* Single-bit compares should always be against zero. */
3605 if (lbitsize == 1 && ! integer_zerop (rhs))
3607 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3608 rhs = build_int_cst (type, 0);
3611 /* Make a new bitfield reference, shift the constant over the
3612 appropriate number of bits and mask it with the computed mask
3613 (in case this was a signed field). If we changed it, make a new one. */
3614 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3616 rhs = const_binop (BIT_AND_EXPR,
3617 const_binop (LSHIFT_EXPR,
3618 fold_convert_loc (loc, unsigned_type, rhs),
3619 size_int (lbitpos)),
3620 mask);
3622 lhs = build2_loc (loc, code, compare_type,
3623 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3624 return lhs;
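/* Editorial sketch, not part of fold-const.c: the shape of the constant
   case in plain C.  Suppose a 4-bit field sits at bit offset 3 of a
   32-bit word (a hypothetical little-endian layout): instead of shifting
   the field out, mask the containing word and compare it against the
   constant shifted into place.  Function name invented.  */
static int
field_equals_5 (unsigned word)
{
  /* Unoptimized: ((word >> 3) & 0xf) == 5.  Folded:  */
  return (word & (0xfu << 3)) == (5u << 3);
}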
3627 /* Subroutine for fold_truth_andor_1: decode a field reference.
3629 If EXP is a comparison reference, we return the innermost reference.
3631 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3632 set to the starting bit number.
3634 If the innermost field can be completely contained in a mode-sized
3635 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3637 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3638 otherwise it is not changed.
3640 *PUNSIGNEDP is set to the signedness of the field.
3642 *PMASK is set to the mask used. This is either contained in a
3643 BIT_AND_EXPR or derived from the width of the field.
3645 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647 Return 0 if this is not a component reference or is one that we can't
3648 do anything with. */
3650 static tree
3651 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3652 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3653 int *punsignedp, int *pvolatilep,
3654 tree *pmask, tree *pand_mask)
3656 tree outer_type = 0;
3657 tree and_mask = 0;
3658 tree mask, inner, offset;
3659 tree unsigned_type;
3660 unsigned int precision;
3662 /* All the optimizations using this function assume integer fields.
3663 There are problems with FP fields since the type_for_size call
3664 below can fail for, e.g., XFmode. */
3665 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3666 return 0;
3668 /* We are interested in the bare arrangement of bits, so strip everything
3669 that doesn't affect the machine mode. However, record the type of the
3670 outermost expression if it may matter below. */
3671 if (CONVERT_EXPR_P (exp)
3672 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3673 outer_type = TREE_TYPE (exp);
3674 STRIP_NOPS (exp);
3676 if (TREE_CODE (exp) == BIT_AND_EXPR)
3678 and_mask = TREE_OPERAND (exp, 1);
3679 exp = TREE_OPERAND (exp, 0);
3680 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3681 if (TREE_CODE (and_mask) != INTEGER_CST)
3682 return 0;
3685 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3686 punsignedp, pvolatilep, false);
3687 if ((inner == exp && and_mask == 0)
3688 || *pbitsize < 0 || offset != 0
3689 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3690 return 0;
3692 /* If the number of bits in the reference is the same as the bitsize of
3693 the outer type, then the outer type gives the signedness. Otherwise
3694 (in case of a small bitfield) the signedness is unchanged. */
3695 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3696 *punsignedp = TYPE_UNSIGNED (outer_type);
3698 /* Compute the mask to access the bitfield. */
3699 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3700 precision = TYPE_PRECISION (unsigned_type);
3702 mask = build_int_cst_type (unsigned_type, -1);
3704 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3705 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3707 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3708 if (and_mask != 0)
3709 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3710 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3712 *pmask = mask;
3713 *pand_mask = and_mask;
3714 return inner;
3717 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3718 bit positions. */
3720 static int
3721 all_ones_mask_p (const_tree mask, int size)
3723 tree type = TREE_TYPE (mask);
3724 unsigned int precision = TYPE_PRECISION (type);
3725 tree tmask;
3727 tmask = build_int_cst_type (signed_type_for (type), -1);
3729 return
3730 tree_int_cst_equal (mask,
3731 const_binop (RSHIFT_EXPR,
3732 const_binop (LSHIFT_EXPR, tmask,
3733 size_int (precision - size)),
3734 size_int (precision - size)));
3737 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3738 represents the sign bit of EXP's type. If EXP represents a sign
3739 or zero extension, also test VAL against the unextended type.
3740 The return value is the (sub)expression whose sign bit is VAL,
3741 or NULL_TREE otherwise. */
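/* For example, on a host with 64-bit HOST_WIDE_INT and a 32-bit signed
EXP, a VAL of 0x80000000 (only the sign bit set) makes sign_bit_p
return EXP; any other constant yields NULL_TREE. */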
3743 static tree
3744 sign_bit_p (tree exp, const_tree val)
3746 unsigned HOST_WIDE_INT mask_lo, lo;
3747 HOST_WIDE_INT mask_hi, hi;
3748 int width;
3749 tree t;
3751 /* Tree EXP must have an integral type. */
3752 t = TREE_TYPE (exp);
3753 if (! INTEGRAL_TYPE_P (t))
3754 return NULL_TREE;
3756 /* Tree VAL must be an integer constant. */
3757 if (TREE_CODE (val) != INTEGER_CST
3758 || TREE_OVERFLOW (val))
3759 return NULL_TREE;
3761 width = TYPE_PRECISION (t);
3762 if (width > HOST_BITS_PER_WIDE_INT)
3764 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3765 lo = 0;
3767 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3768 mask_lo = -1;
3770 else
3772 hi = 0;
3773 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3775 mask_hi = 0;
3776 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3779 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3780 treat VAL as if it were unsigned. */
3781 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3782 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3783 return exp;
3785 /* Handle extension from a narrower type. */
3786 if (TREE_CODE (exp) == NOP_EXPR
3787 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3788 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3790 return NULL_TREE;
3793 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3794 to be evaluated unconditionally. */
3796 static int
3797 simple_operand_p (const_tree exp)
3799 /* Strip any conversions that don't change the machine mode. */
3800 STRIP_NOPS (exp);
3802 return (CONSTANT_CLASS_P (exp)
3803 || TREE_CODE (exp) == SSA_NAME
3804 || (DECL_P (exp)
3805 && ! TREE_ADDRESSABLE (exp)
3806 && ! TREE_THIS_VOLATILE (exp)
3807 && ! DECL_NONLOCAL (exp)
3808 /* Don't regard global variables as simple. They may be
3809 allocated in ways unknown to the compiler (shared memory,
3810 #pragma weak, etc). */
3811 && ! TREE_PUBLIC (exp)
3812 && ! DECL_EXTERNAL (exp)
3813 /* Weakrefs are not safe to be read, since they can be NULL.
3814 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3815 have DECL_WEAK flag set. */
3816 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3817 /* Loading a static variable is unduly expensive, but global
3818 registers aren't expensive. */
3819 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3822 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3823 to be evaluated unconditionally.
3824 In addition to simple_operand_p, we assume that comparisons, conversions,
3825 and logic-not operations are simple, if their operands are simple, too. */
3827 static bool
3828 simple_operand_p_2 (tree exp)
3830 enum tree_code code;
3832 if (TREE_SIDE_EFFECTS (exp)
3833 || tree_could_trap_p (exp))
3834 return false;
3836 while (CONVERT_EXPR_P (exp))
3837 exp = TREE_OPERAND (exp, 0);
3839 code = TREE_CODE (exp);
3841 if (TREE_CODE_CLASS (code) == tcc_comparison)
3842 return (simple_operand_p (TREE_OPERAND (exp, 0))
3843 && simple_operand_p (TREE_OPERAND (exp, 1)));
3845 if (code == TRUTH_NOT_EXPR)
3846 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3848 return simple_operand_p (exp);
3852 /* The following functions are subroutines to fold_range_test and allow it to
3853 try to change a logical combination of comparisons into a range test.
3855 For example, both
3856 X == 2 || X == 3 || X == 4 || X == 5
3857 and
3858 X >= 2 && X <= 5
3859 are converted to
3860 (unsigned) (X - 2) <= 3
3862 We describe each set of comparisons as being either inside or outside
3863 a range, using a variable named like IN_P, and then describe the
3864 range with a lower and upper bound. If one of the bounds is omitted,
3865 it represents either the highest or lowest value of the type.
3867 In the comments below, we represent a range by two numbers in brackets
3868 preceded by a "+" to designate being inside that range, or a "-" to
3869 designate being outside that range, so the condition can be inverted by
3870 flipping the prefix. An omitted bound is represented by a "-". For
3871 example, "- [-, 10]" means being outside the range starting at the lowest
3872 possible value and ending at 10, in other words, being greater than 10.
3873 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3874 always false.
3876 We set up things so that the missing bounds are handled in a consistent
3877 manner so neither a missing bound nor "true" and "false" need to be
3878 handled using a special case. */
3880 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3881 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3882 and UPPER1_P are nonzero if the respective argument is an upper bound
3883 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3884 must be specified for a comparison. ARG1 will be converted to ARG0's
3885 type if both are specified. */
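/* For example, comparing a constant C used as an upper bound against an
omitted (infinite) upper bound, range_binop (LT_EXPR, type, C, 1,
NULL_TREE, 1) evaluates "C < infinity": SGN0 is 0 and SGN1 is 1
below, so the result is boolean true. */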
3887 static tree
3888 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3889 tree arg1, int upper1_p)
3891 tree tem;
3892 int result;
3893 int sgn0, sgn1;
3895 /* If neither arg represents infinity, do the normal operation.
3896 Else, if not a comparison, return infinity. Else handle the special
3897 comparison rules. Note that most of the cases below won't occur, but
3898 are handled for consistency. */
3900 if (arg0 != 0 && arg1 != 0)
3902 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3903 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3904 STRIP_NOPS (tem);
3905 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3908 if (TREE_CODE_CLASS (code) != tcc_comparison)
3909 return 0;
3911 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3912 for neither. In real maths, we cannot assume open ended ranges are
3913 the same. But, this is computer arithmetic, where numbers are finite.
3914 We can therefore identify any unbounded bound with a value Z,
3915 Z being greater than any representable number, which permits
3916 us to treat unbounded ranges as equal. */
3917 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3918 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3919 switch (code)
3921 case EQ_EXPR:
3922 result = sgn0 == sgn1;
3923 break;
3924 case NE_EXPR:
3925 result = sgn0 != sgn1;
3926 break;
3927 case LT_EXPR:
3928 result = sgn0 < sgn1;
3929 break;
3930 case LE_EXPR:
3931 result = sgn0 <= sgn1;
3932 break;
3933 case GT_EXPR:
3934 result = sgn0 > sgn1;
3935 break;
3936 case GE_EXPR:
3937 result = sgn0 >= sgn1;
3938 break;
3939 default:
3940 gcc_unreachable ();
3943 return constant_boolean_node (result, type);
3946 /* Helper routine for make_range. Perform one step for it, return
3947 new expression if the loop should continue or NULL_TREE if it should
3948 stop. */
3950 tree
3951 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3952 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3953 bool *strict_overflow_p)
3955 tree arg0_type = TREE_TYPE (arg0);
3956 tree n_low, n_high, low = *p_low, high = *p_high;
3957 int in_p = *p_in_p, n_in_p;
3959 switch (code)
3961 case TRUTH_NOT_EXPR:
3962 /* We can only do something if the range is testing for zero. */
3963 if (low == NULL_TREE || high == NULL_TREE
3964 || ! integer_zerop (low) || ! integer_zerop (high))
3965 return NULL_TREE;
3966 *p_in_p = ! in_p;
3967 return arg0;
3969 case EQ_EXPR: case NE_EXPR:
3970 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3971 /* We can only do something if the range is testing for zero
3972 and if the second operand is an integer constant. Note that
3973 saying something is "in" the range we make is done by
3974 complementing IN_P since it will set in the initial case of
3975 being not equal to zero; "out" is leaving it alone. */
3976 if (low == NULL_TREE || high == NULL_TREE
3977 || ! integer_zerop (low) || ! integer_zerop (high)
3978 || TREE_CODE (arg1) != INTEGER_CST)
3979 return NULL_TREE;
3981 switch (code)
3983 case NE_EXPR: /* - [c, c] */
3984 low = high = arg1;
3985 break;
3986 case EQ_EXPR: /* + [c, c] */
3987 in_p = ! in_p, low = high = arg1;
3988 break;
3989 case GT_EXPR: /* - [-, c] */
3990 low = 0, high = arg1;
3991 break;
3992 case GE_EXPR: /* + [c, -] */
3993 in_p = ! in_p, low = arg1, high = 0;
3994 break;
3995 case LT_EXPR: /* - [c, -] */
3996 low = arg1, high = 0;
3997 break;
3998 case LE_EXPR: /* + [-, c] */
3999 in_p = ! in_p, low = 0, high = arg1;
4000 break;
4001 default:
4002 gcc_unreachable ();
4005 /* If this is an unsigned comparison, we also know that EXP is
4006 greater than or equal to zero. We base the range tests we make
4007 on that fact, so we record it here so we can parse existing
4008 range tests. We test arg0_type since often the return type
4009 of, e.g. EQ_EXPR, is boolean. */
4010 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4012 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4013 in_p, low, high, 1,
4014 build_int_cst (arg0_type, 0),
4015 NULL_TREE))
4016 return NULL_TREE;
4018 in_p = n_in_p, low = n_low, high = n_high;
4020 /* If the high bound is missing, but we have a nonzero low
4021 bound, reverse the range so it goes from zero to the low bound
4022 minus 1. */
4023 if (high == 0 && low && ! integer_zerop (low))
4025 in_p = ! in_p;
4026 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4027 integer_one_node, 0);
4028 low = build_int_cst (arg0_type, 0);
4032 *p_low = low;
4033 *p_high = high;
4034 *p_in_p = in_p;
4035 return arg0;
4037 case NEGATE_EXPR:
4038 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4039 low and high are non-NULL, then normalize will DTRT. */
4040 if (!TYPE_UNSIGNED (arg0_type)
4041 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4043 if (low == NULL_TREE)
4044 low = TYPE_MIN_VALUE (arg0_type);
4045 if (high == NULL_TREE)
4046 high = TYPE_MAX_VALUE (arg0_type);
4049 /* (-x) IN [a,b] -> x in [-b, -a] */
4050 n_low = range_binop (MINUS_EXPR, exp_type,
4051 build_int_cst (exp_type, 0),
4052 0, high, 1);
4053 n_high = range_binop (MINUS_EXPR, exp_type,
4054 build_int_cst (exp_type, 0),
4055 0, low, 0);
4056 if (n_high != 0 && TREE_OVERFLOW (n_high))
4057 return NULL_TREE;
4058 goto normalize;
4060 case BIT_NOT_EXPR:
4061 /* ~ X -> -X - 1 */
4062 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4063 build_int_cst (exp_type, 1));
4065 case PLUS_EXPR:
4066 case MINUS_EXPR:
4067 if (TREE_CODE (arg1) != INTEGER_CST)
4068 return NULL_TREE;
4070 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4071 move a constant to the other side. */
4072 if (!TYPE_UNSIGNED (arg0_type)
4073 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4074 return NULL_TREE;
4076 /* If EXP is signed, any overflow in the computation is undefined,
4077 so we don't worry about it so long as our computations on
4078 the bounds don't overflow. For unsigned, overflow is defined
4079 and this is exactly the right thing. */
4080 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4081 arg0_type, low, 0, arg1, 0);
4082 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4083 arg0_type, high, 1, arg1, 0);
4084 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4085 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4086 return NULL_TREE;
4088 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4089 *strict_overflow_p = true;
4091 normalize:
4092 /* Check for an unsigned range which has wrapped around the maximum
4093 value thus making n_high < n_low, and normalize it. */
4094 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4096 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4097 integer_one_node, 0);
4098 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4099 integer_one_node, 0);
4101 /* If the range is of the form +/- [ x+1, x ], we won't
4102 be able to normalize it. But then, it represents the
4103 whole range or the empty set, so make it
4104 +/- [ -, - ]. */
4105 if (tree_int_cst_equal (n_low, low)
4106 && tree_int_cst_equal (n_high, high))
4107 low = high = 0;
4108 else
4109 in_p = ! in_p;
4111 else
4112 low = n_low, high = n_high;
4114 *p_low = low;
4115 *p_high = high;
4116 *p_in_p = in_p;
4117 return arg0;
4119 CASE_CONVERT:
4120 case NON_LVALUE_EXPR:
4121 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4122 return NULL_TREE;
4124 if (! INTEGRAL_TYPE_P (arg0_type)
4125 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4126 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4127 return NULL_TREE;
4129 n_low = low, n_high = high;
4131 if (n_low != 0)
4132 n_low = fold_convert_loc (loc, arg0_type, n_low);
4134 if (n_high != 0)
4135 n_high = fold_convert_loc (loc, arg0_type, n_high);
4137 /* If we're converting arg0 from an unsigned type to the signed
4138 type of exp, we will be doing the comparison as unsigned.
4139 The tests above have already verified that LOW and HIGH
4140 are both positive.
4142 So we have to ensure that we will handle large unsigned
4143 values the same way that the current signed bounds treat
4144 negative values. */
4146 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4148 tree high_positive;
4149 tree equiv_type;
4150 /* For fixed-point modes, we need to pass the saturating flag
4151 as the 2nd parameter. */
4152 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4153 equiv_type
4154 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4155 TYPE_SATURATING (arg0_type));
4156 else
4157 equiv_type
4158 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4160 /* A range without an upper bound is, naturally, unbounded.
4161 Since convert would have cropped a very large value, use
4162 the max value for the destination type. */
4163 high_positive
4164 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4165 : TYPE_MAX_VALUE (arg0_type);
4167 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4168 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4169 fold_convert_loc (loc, arg0_type,
4170 high_positive),
4171 build_int_cst (arg0_type, 1));
4173 /* If the low bound is specified, "and" the range with the
4174 range for which the original unsigned value will be
4175 positive. */
4176 if (low != 0)
4178 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4179 1, fold_convert_loc (loc, arg0_type,
4180 integer_zero_node),
4181 high_positive))
4182 return NULL_TREE;
4184 in_p = (n_in_p == in_p);
4186 else
4188 /* Otherwise, "or" the range with the range of the input
4189 that will be interpreted as negative. */
4190 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4191 1, fold_convert_loc (loc, arg0_type,
4192 integer_zero_node),
4193 high_positive))
4194 return NULL_TREE;
4196 in_p = (in_p != n_in_p);
4200 *p_low = n_low;
4201 *p_high = n_high;
4202 *p_in_p = in_p;
4203 return arg0;
4205 default:
4206 return NULL_TREE;
4210 /* Given EXP, a logical expression, set the range it is testing into
4211 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4212 actually being tested. *PLOW and *PHIGH will be made of the same
4213 type as the returned expression. If EXP is not a comparison, we
4214 will most likely not be returning a useful value and range. Set
4215 *STRICT_OVERFLOW_P to true if the return value is only valid
4216 because signed overflow is undefined; otherwise, do not change
4217 *STRICT_OVERFLOW_P. */
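/* For example, for EXP of the form X > 10 with signed integer X,
make_range returns X with *PIN_P == 0, *PLOW == NULL_TREE and
*PHIGH == 10, i.e. the range - [-, 10] in the notation above. */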
4219 tree
4220 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4221 bool *strict_overflow_p)
4223 enum tree_code code;
4224 tree arg0, arg1 = NULL_TREE;
4225 tree exp_type, nexp;
4226 int in_p;
4227 tree low, high;
4228 location_t loc = EXPR_LOCATION (exp);
4230 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4231 and see if we can refine the range. Some of the cases below may not
4232 happen, but it doesn't seem worth worrying about this. We "continue"
4233 the outer loop when we've changed something; otherwise we "break"
4234 the switch, which will "break" the while. */
4236 in_p = 0;
4237 low = high = build_int_cst (TREE_TYPE (exp), 0);
4239 while (1)
4241 code = TREE_CODE (exp);
4242 exp_type = TREE_TYPE (exp);
4243 arg0 = NULL_TREE;
4245 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4247 if (TREE_OPERAND_LENGTH (exp) > 0)
4248 arg0 = TREE_OPERAND (exp, 0);
4249 if (TREE_CODE_CLASS (code) == tcc_binary
4250 || TREE_CODE_CLASS (code) == tcc_comparison
4251 || (TREE_CODE_CLASS (code) == tcc_expression
4252 && TREE_OPERAND_LENGTH (exp) > 1))
4253 arg1 = TREE_OPERAND (exp, 1);
4255 if (arg0 == NULL_TREE)
4256 break;
4258 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4259 &high, &in_p, strict_overflow_p);
4260 if (nexp == NULL_TREE)
4261 break;
4262 exp = nexp;
4265 /* If EXP is a constant, we can evaluate whether this is true or false. */
4266 if (TREE_CODE (exp) == INTEGER_CST)
4268 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4269 exp, 0, low, 0))
4270 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4271 exp, 1, high, 1)));
4272 low = high = 0;
4273 exp = 0;
4276 *pin_p = in_p, *plow = low, *phigh = high;
4277 return exp;
4280 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4281 type, TYPE, return an expression to test if EXP is in (or out of, depending
4282 on IN_P) the range. Return 0 if the test couldn't be created. */
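/* For example, with IN_P == 1, LOW == 2 and HIGH == 5 for a signed
32-bit EXP, the recursion below ends up building the familiar
(unsigned) (EXP - 2) <= 3 form of the range test. */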
4284 tree
4285 build_range_check (location_t loc, tree type, tree exp, int in_p,
4286 tree low, tree high)
4288 tree etype = TREE_TYPE (exp), value;
4290 #ifdef HAVE_canonicalize_funcptr_for_compare
4291 /* Disable this optimization for function pointer expressions
4292 on targets that require function pointer canonicalization. */
4293 if (HAVE_canonicalize_funcptr_for_compare
4294 && TREE_CODE (etype) == POINTER_TYPE
4295 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4296 return NULL_TREE;
4297 #endif
4299 if (! in_p)
4301 value = build_range_check (loc, type, exp, 1, low, high);
4302 if (value != 0)
4303 return invert_truthvalue_loc (loc, value);
4305 return 0;
4308 if (low == 0 && high == 0)
4309 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4311 if (low == 0)
4312 return fold_build2_loc (loc, LE_EXPR, type, exp,
4313 fold_convert_loc (loc, etype, high));
4315 if (high == 0)
4316 return fold_build2_loc (loc, GE_EXPR, type, exp,
4317 fold_convert_loc (loc, etype, low));
4319 if (operand_equal_p (low, high, 0))
4320 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4321 fold_convert_loc (loc, etype, low));
4323 if (integer_zerop (low))
4325 if (! TYPE_UNSIGNED (etype))
4327 etype = unsigned_type_for (etype);
4328 high = fold_convert_loc (loc, etype, high);
4329 exp = fold_convert_loc (loc, etype, exp);
4331 return build_range_check (loc, type, exp, 1, 0, high);
4334 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4335 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4337 unsigned HOST_WIDE_INT lo;
4338 HOST_WIDE_INT hi;
4339 int prec;
4341 prec = TYPE_PRECISION (etype);
4342 if (prec <= HOST_BITS_PER_WIDE_INT)
4344 hi = 0;
4345 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4347 else
4349 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4350 lo = HOST_WIDE_INT_M1U;
4353 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4355 if (TYPE_UNSIGNED (etype))
4357 tree signed_etype = signed_type_for (etype);
4358 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4359 etype
4360 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4361 else
4362 etype = signed_etype;
4363 exp = fold_convert_loc (loc, etype, exp);
4365 return fold_build2_loc (loc, GT_EXPR, type, exp,
4366 build_int_cst (etype, 0));
4370 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4371 This requires wrap-around arithmetic for the type of the expression.
4372 First make sure that arithmetic in this type is valid, then make sure
4373 that it wraps around. */
4374 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4375 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4376 TYPE_UNSIGNED (etype));
4378 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4380 tree utype, minv, maxv;
4382 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4383 for the type in question, as we rely on this here. */
4384 utype = unsigned_type_for (etype);
4385 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4386 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4387 integer_one_node, 1);
4388 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4390 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4391 minv, 1, maxv, 1)))
4392 etype = utype;
4393 else
4394 return 0;
4397 high = fold_convert_loc (loc, etype, high);
4398 low = fold_convert_loc (loc, etype, low);
4399 exp = fold_convert_loc (loc, etype, exp);
4401 value = const_binop (MINUS_EXPR, high, low);
4404 if (POINTER_TYPE_P (etype))
4406 if (value != 0 && !TREE_OVERFLOW (value))
4408 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4409 return build_range_check (loc, type,
4410 fold_build_pointer_plus_loc (loc, exp, low),
4411 1, build_int_cst (etype, 0), value);
4413 return 0;
4416 if (value != 0 && !TREE_OVERFLOW (value))
4417 return build_range_check (loc, type,
4418 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4419 1, build_int_cst (etype, 0), value);
4421 return 0;
4424 /* Return the predecessor of VAL in its type, handling the infinite case. */
4426 static tree
4427 range_predecessor (tree val)
4429 tree type = TREE_TYPE (val);
4431 if (INTEGRAL_TYPE_P (type)
4432 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4433 return 0;
4434 else
4435 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4438 /* Return the successor of VAL in its type, handling the infinite case. */
4440 static tree
4441 range_successor (tree val)
4443 tree type = TREE_TYPE (val);
4445 if (INTEGRAL_TYPE_P (type)
4446 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4447 return 0;
4448 else
4449 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4452 /* Given two ranges, see if we can merge them into one. Return 1 if we
4453 can, 0 if we can't. Set the output range into the specified parameters. */
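/* For example, merging - [-, 3] with - [6, -] (the conjunction
X > 3 && X < 6) yields the single range + [4, 5]: the two excluded
ranges do not overlap, so the result is the gap between them. */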
4455 bool
4456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4457 tree high0, int in1_p, tree low1, tree high1)
4459 int no_overlap;
4460 int subset;
4461 int temp;
4462 tree tem;
4463 int in_p;
4464 tree low, high;
4465 int lowequal = ((low0 == 0 && low1 == 0)
4466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4467 low0, 0, low1, 0)));
4468 int highequal = ((high0 == 0 && high1 == 0)
4469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4470 high0, 1, high1, 1)));
4472 /* Make range 0 be the range that starts first, or ends last if they
4473 start at the same value. Swap them if that is not already the case. */
4474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4475 low0, 0, low1, 0))
4476 || (lowequal
4477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4478 high1, 1, high0, 1))))
4480 temp = in0_p, in0_p = in1_p, in1_p = temp;
4481 tem = low0, low0 = low1, low1 = tem;
4482 tem = high0, high0 = high1, high1 = tem;
4485 /* Now flag two cases, whether the ranges are disjoint or whether the
4486 second range is totally subsumed in the first. Note that the tests
4487 below are simplified by the ones above. */
4488 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4489 high0, 1, low1, 0));
4490 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4491 high1, 1, high0, 1));
4493 /* We now have four cases, depending on whether we are including or
4494 excluding the two ranges. */
4495 if (in0_p && in1_p)
4497 /* If they don't overlap, the result is false. If the second range
4498 is a subset it is the result. Otherwise, the range is from the start
4499 of the second to the end of the first. */
4500 if (no_overlap)
4501 in_p = 0, low = high = 0;
4502 else if (subset)
4503 in_p = 1, low = low1, high = high1;
4504 else
4505 in_p = 1, low = low1, high = high0;
4508 else if (in0_p && ! in1_p)
4510 /* If they don't overlap, the result is the first range. If they are
4511 equal, the result is false. If the second range is a subset of the
4512 first, and the ranges begin at the same place, we go from just after
4513 the end of the second range to the end of the first. If the second
4514 range is not a subset of the first, or if it is a subset and both
4515 ranges end at the same place, the range starts at the start of the
4516 first range and ends just before the second range.
4517 Otherwise, we can't describe this as a single range. */
4518 if (no_overlap)
4519 in_p = 1, low = low0, high = high0;
4520 else if (lowequal && highequal)
4521 in_p = 0, low = high = 0;
4522 else if (subset && lowequal)
4524 low = range_successor (high1);
4525 high = high0;
4526 in_p = 1;
4527 if (low == 0)
4529 /* We are in the weird situation where high0 > high1 but
4530 high1 has no successor. Punt. */
4531 return 0;
4534 else if (! subset || highequal)
4536 low = low0;
4537 high = range_predecessor (low1);
4538 in_p = 1;
4539 if (high == 0)
4541 /* low0 < low1 but low1 has no predecessor. Punt. */
4542 return 0;
4545 else
4546 return 0;
4549 else if (! in0_p && in1_p)
4551 /* If they don't overlap, the result is the second range. If the second
4552 is a subset of the first, the result is false. Otherwise,
4553 the range starts just after the first range and ends at the
4554 end of the second. */
4555 if (no_overlap)
4556 in_p = 1, low = low1, high = high1;
4557 else if (subset || highequal)
4558 in_p = 0, low = high = 0;
4559 else
4561 low = range_successor (high0);
4562 high = high1;
4563 in_p = 1;
4564 if (low == 0)
4566 /* high1 > high0 but high0 has no successor. Punt. */
4567 return 0;
4572 else
4574 /* The case where we are excluding both ranges. Here the complex case
4575 is if they don't overlap. In that case, the only time we have a
4576 range is if they are adjacent. If the second is a subset of the
4577 first, the result is the first. Otherwise, the range to exclude
4578 starts at the beginning of the first range and ends at the end of the
4579 second. */
4580 if (no_overlap)
4582 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4583 range_successor (high0),
4584 1, low1, 0)))
4585 in_p = 0, low = low0, high = high1;
4586 else
4588 /* Canonicalize - [min, x] into - [-, x]. */
4589 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4590 switch (TREE_CODE (TREE_TYPE (low0)))
4592 case ENUMERAL_TYPE:
4593 if (TYPE_PRECISION (TREE_TYPE (low0))
4594 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4595 break;
4596 /* FALLTHROUGH */
4597 case INTEGER_TYPE:
4598 if (tree_int_cst_equal (low0,
4599 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4600 low0 = 0;
4601 break;
4602 case POINTER_TYPE:
4603 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4604 && integer_zerop (low0))
4605 low0 = 0;
4606 break;
4607 default:
4608 break;
4611 /* Canonicalize - [x, max] into - [x, -]. */
4612 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4613 switch (TREE_CODE (TREE_TYPE (high1)))
4615 case ENUMERAL_TYPE:
4616 if (TYPE_PRECISION (TREE_TYPE (high1))
4617 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4618 break;
4619 /* FALLTHROUGH */
4620 case INTEGER_TYPE:
4621 if (tree_int_cst_equal (high1,
4622 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4623 high1 = 0;
4624 break;
4625 case POINTER_TYPE:
4626 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4627 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4628 high1, 1,
4629 integer_one_node, 1)))
4630 high1 = 0;
4631 break;
4632 default:
4633 break;
4636 /* The ranges might also be adjacent between the maximum and
4637 minimum values of the given type. For
4638 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4639 return + [x + 1, y - 1]. */
4640 if (low0 == 0 && high1 == 0)
4642 low = range_successor (high0);
4643 high = range_predecessor (low1);
4644 if (low == 0 || high == 0)
4645 return 0;
4647 in_p = 1;
4649 else
4650 return 0;
4653 else if (subset)
4654 in_p = 0, low = low0, high = high0;
4655 else
4656 in_p = 0, low = low0, high = high1;
4659 *pin_p = in_p, *plow = low, *phigh = high;
4660 return 1;
4664 /* Subroutine of fold, looking inside expressions of the form
4665 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4666 of the COND_EXPR. This function is also used to optimize
4667 A op B ? C : A, by reversing the comparison first.
4669 Return a folded expression whose code is not a COND_EXPR
4670 anymore, or NULL_TREE if no folding opportunity is found. */
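/* For example, X > 0 ? X : -X matches the first group of
transformations below and folds to ABS_EXPR <X>, provided signed
zeros need not be honored. */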
4672 static tree
4673 fold_cond_expr_with_comparison (location_t loc, tree type,
4674 tree arg0, tree arg1, tree arg2)
4676 enum tree_code comp_code = TREE_CODE (arg0);
4677 tree arg00 = TREE_OPERAND (arg0, 0);
4678 tree arg01 = TREE_OPERAND (arg0, 1);
4679 tree arg1_type = TREE_TYPE (arg1);
4680 tree tem;
4682 STRIP_NOPS (arg1);
4683 STRIP_NOPS (arg2);
4685 /* If we have A op 0 ? A : -A, consider applying the following
4686 transformations:
4688 A == 0? A : -A same as -A
4689 A != 0? A : -A same as A
4690 A >= 0? A : -A same as abs (A)
4691 A > 0? A : -A same as abs (A)
4692 A <= 0? A : -A same as -abs (A)
4693 A < 0? A : -A same as -abs (A)
4695 None of these transformations work for modes with signed
4696 zeros. If A is +/-0, the first two transformations will
4697 change the sign of the result (from +0 to -0, or vice
4698 versa). The last four will fix the sign of the result,
4699 even though the original expressions could be positive or
4700 negative, depending on the sign of A.
4702 Note that all these transformations are correct if A is
4703 NaN, since the two alternatives (A and -A) are also NaNs. */
4704 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4705 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4706 ? real_zerop (arg01)
4707 : integer_zerop (arg01))
4708 && ((TREE_CODE (arg2) == NEGATE_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4710 /* In the case that A is of the form X-Y, '-A' (arg2) may
4711 have already been folded to Y-X, check for that. */
4712 || (TREE_CODE (arg1) == MINUS_EXPR
4713 && TREE_CODE (arg2) == MINUS_EXPR
4714 && operand_equal_p (TREE_OPERAND (arg1, 0),
4715 TREE_OPERAND (arg2, 1), 0)
4716 && operand_equal_p (TREE_OPERAND (arg1, 1),
4717 TREE_OPERAND (arg2, 0), 0))))
4718 switch (comp_code)
4720 case EQ_EXPR:
4721 case UNEQ_EXPR:
4722 tem = fold_convert_loc (loc, arg1_type, arg1);
4723 return pedantic_non_lvalue_loc (loc,
4724 fold_convert_loc (loc, type,
4725 negate_expr (tem)));
4726 case NE_EXPR:
4727 case LTGT_EXPR:
4728 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4729 case UNGE_EXPR:
4730 case UNGT_EXPR:
4731 if (flag_trapping_math)
4732 break;
4733 /* Fall through. */
4734 case GE_EXPR:
4735 case GT_EXPR:
4736 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4737 arg1 = fold_convert_loc (loc, signed_type_for
4738 (TREE_TYPE (arg1)), arg1);
4739 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4740 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4741 case UNLE_EXPR:
4742 case UNLT_EXPR:
4743 if (flag_trapping_math)
4744 break;
4745 case LE_EXPR:
4746 case LT_EXPR:
4747 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4748 arg1 = fold_convert_loc (loc, signed_type_for
4749 (TREE_TYPE (arg1)), arg1);
4750 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4751 return negate_expr (fold_convert_loc (loc, type, tem));
4752 default:
4753 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4754 break;
4757 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4758 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4759 both transformations are correct when A is NaN: A != 0
4760 is then true, and A == 0 is false. */
4762 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4763 && integer_zerop (arg01) && integer_zerop (arg2))
4765 if (comp_code == NE_EXPR)
4766 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4767 else if (comp_code == EQ_EXPR)
4768 return build_zero_cst (type);
4771 /* Try some transformations of A op B ? A : B.
4773 A == B? A : B same as B
4774 A != B? A : B same as A
4775 A >= B? A : B same as max (A, B)
4776 A > B? A : B same as max (B, A)
4777 A <= B? A : B same as min (A, B)
4778 A < B? A : B same as min (B, A)
4780 As above, these transformations don't work in the presence
4781 of signed zeros. For example, if A and B are zeros of
4782 opposite sign, the first two transformations will change
4783 the sign of the result. In the last four, the original
4784 expressions give different results for (A=+0, B=-0) and
4785 (A=-0, B=+0), but the transformed expressions do not.
4787 The first two transformations are correct if either A or B
4788 is a NaN. In the first transformation, the condition will
4789 be false, and B will indeed be chosen. In the case of the
4790 second transformation, the condition A != B will be true,
4791 and A will be chosen.
4793 The conversions to max() and min() are not correct if B is
4794 a number and A is not. The conditions in the original
4795 expressions will be false, so all four give B. The min()
4796 and max() versions would give a NaN instead. */
4797 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4798 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4799 /* Avoid these transformations if the COND_EXPR may be used
4800 as an lvalue in the C++ front-end. PR c++/19199. */
4801 && (in_gimple_form
4802 || VECTOR_TYPE_P (type)
4803 || (strcmp (lang_hooks.name, "GNU C++") != 0
4804 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4805 || ! maybe_lvalue_p (arg1)
4806 || ! maybe_lvalue_p (arg2)))
4808 tree comp_op0 = arg00;
4809 tree comp_op1 = arg01;
4810 tree comp_type = TREE_TYPE (comp_op0);
4812 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4813 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4815 comp_type = type;
4816 comp_op0 = arg1;
4817 comp_op1 = arg2;
4820 switch (comp_code)
4822 case EQ_EXPR:
4823 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4824 case NE_EXPR:
4825 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4826 case LE_EXPR:
4827 case LT_EXPR:
4828 case UNLE_EXPR:
4829 case UNLT_EXPR:
4830 /* In C++ a ?: expression can be an lvalue, so put the
4831 operand which will be used if they are equal first
4832 so that we can convert this back to the
4833 corresponding COND_EXPR. */
4834 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4836 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4837 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4838 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4839 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4840 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4841 comp_op1, comp_op0);
4842 return pedantic_non_lvalue_loc (loc,
4843 fold_convert_loc (loc, type, tem));
4845 break;
4846 case GE_EXPR:
4847 case GT_EXPR:
4848 case UNGE_EXPR:
4849 case UNGT_EXPR:
4850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4852 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4853 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4854 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4855 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4856 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4857 comp_op1, comp_op0);
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, tem));
4861 break;
4862 case UNEQ_EXPR:
4863 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4864 return pedantic_non_lvalue_loc (loc,
4865 fold_convert_loc (loc, type, arg2));
4866 break;
4867 case LTGT_EXPR:
4868 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4869 return pedantic_non_lvalue_loc (loc,
4870 fold_convert_loc (loc, type, arg1));
4871 break;
4872 default:
4873 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4874 break;
4878 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4879 we might still be able to simplify this. For example,
4880 if C1 is one less or one more than C2, this might have started
4881 out as a MIN or MAX and been transformed by this function.
4882 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4884 if (INTEGRAL_TYPE_P (type)
4885 && TREE_CODE (arg01) == INTEGER_CST
4886 && TREE_CODE (arg2) == INTEGER_CST)
4887 switch (comp_code)
4889 case EQ_EXPR:
4890 if (TREE_CODE (arg1) == INTEGER_CST)
4891 break;
4892 /* We can replace A with C1 in this case. */
4893 arg1 = fold_convert_loc (loc, type, arg01);
4894 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4896 case LT_EXPR:
4897 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4898 MIN_EXPR, to preserve the signedness of the comparison. */
4899 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4900 OEP_ONLY_CONST)
4901 && operand_equal_p (arg01,
4902 const_binop (PLUS_EXPR, arg2,
4903 build_int_cst (type, 1)),
4904 OEP_ONLY_CONST))
4906 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4907 fold_convert_loc (loc, TREE_TYPE (arg00),
4908 arg2));
4909 return pedantic_non_lvalue_loc (loc,
4910 fold_convert_loc (loc, type, tem));
4912 break;
4914 case LE_EXPR:
4915 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4916 as above. */
4917 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4918 OEP_ONLY_CONST)
4919 && operand_equal_p (arg01,
4920 const_binop (MINUS_EXPR, arg2,
4921 build_int_cst (type, 1)),
4922 OEP_ONLY_CONST))
4924 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4925 fold_convert_loc (loc, TREE_TYPE (arg00),
4926 arg2));
4927 return pedantic_non_lvalue_loc (loc,
4928 fold_convert_loc (loc, type, tem));
4930 break;
4932 case GT_EXPR:
4933 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4934 MAX_EXPR, to preserve the signedness of the comparison. */
4935 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4936 OEP_ONLY_CONST)
4937 && operand_equal_p (arg01,
4938 const_binop (MINUS_EXPR, arg2,
4939 build_int_cst (type, 1)),
4940 OEP_ONLY_CONST))
4942 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4943 fold_convert_loc (loc, TREE_TYPE (arg00),
4944 arg2));
4945 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4947 break;
4949 case GE_EXPR:
4950 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4951 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4952 OEP_ONLY_CONST)
4953 && operand_equal_p (arg01,
4954 const_binop (PLUS_EXPR, arg2,
4955 build_int_cst (type, 1)),
4956 OEP_ONLY_CONST))
4958 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4959 fold_convert_loc (loc, TREE_TYPE (arg00),
4960 arg2));
4961 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4963 break;
4964 case NE_EXPR:
4965 break;
4966 default:
4967 gcc_unreachable ();
4970 return NULL_TREE;
4975 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4976 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4977 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4978 false) >= 2)
4979 #endif
4981 /* EXP is some logical combination of boolean tests. See if we can
4982 merge it into some range test. Return the new tree if so. */
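/* For example, given the TRUTH_ANDIF_EXPR for ch >= '0' && ch <= '9',
the two ranges merge to + ['0', '9'] and the check built below is
roughly (unsigned) (ch - '0') <= 9. */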
4984 static tree
4985 fold_range_test (location_t loc, enum tree_code code, tree type,
4986 tree op0, tree op1)
4988 int or_op = (code == TRUTH_ORIF_EXPR
4989 || code == TRUTH_OR_EXPR);
4990 int in0_p, in1_p, in_p;
4991 tree low0, low1, low, high0, high1, high;
4992 bool strict_overflow_p = false;
4993 tree tem, lhs, rhs;
4994 const char * const warnmsg = G_("assuming signed overflow does not occur "
4995 "when simplifying range test");
4997 if (!INTEGRAL_TYPE_P (type))
4998 return 0;
5000 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5001 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5003 /* If this is an OR operation, invert both sides; we will invert
5004 again at the end. */
5005 if (or_op)
5006 in0_p = ! in0_p, in1_p = ! in1_p;
5008 /* If both expressions are the same, if we can merge the ranges, and we
5009 can build the range test, return it or it inverted. If one of the
5010 ranges is always true or always false, consider it to be the same
5011 expression as the other. */
5012 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5013 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5014 in1_p, low1, high1)
5015 && 0 != (tem = (build_range_check (loc, type,
5016 lhs != 0 ? lhs
5017 : rhs != 0 ? rhs : integer_zero_node,
5018 in_p, low, high))))
5020 if (strict_overflow_p)
5021 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5022 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5025 /* On machines where branches are expensive, if this is a
5026 short-circuited branch and the underlying object on both sides
5027 is the same, make a non-short-circuit operation. */
5028 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5029 && lhs != 0 && rhs != 0
5030 && (code == TRUTH_ANDIF_EXPR
5031 || code == TRUTH_ORIF_EXPR)
5032 && operand_equal_p (lhs, rhs, 0))
5034 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5035 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5036 which cases we can't do this. */
5037 if (simple_operand_p (lhs))
5038 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5039 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5040 type, op0, op1);
5042 else if (!lang_hooks.decls.global_bindings_p ()
5043 && !CONTAINS_PLACEHOLDER_P (lhs))
5045 tree common = save_expr (lhs);
5047 if (0 != (lhs = build_range_check (loc, type, common,
5048 or_op ? ! in0_p : in0_p,
5049 low0, high0))
5050 && (0 != (rhs = build_range_check (loc, type, common,
5051 or_op ? ! in1_p : in1_p,
5052 low1, high1))))
5054 if (strict_overflow_p)
5055 fold_overflow_warning (warnmsg,
5056 WARN_STRICT_OVERFLOW_COMPARISON);
5057 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5058 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5059 type, lhs, rhs);
5064 return 0;
5067 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5068 bit value. Arrange things so the extra bits will be set to zero if and
5069 only if C is sign-extended to its full width. If MASK is nonzero,
5070 it is an INTEGER_CST that should be AND'ed with the extra bits. */
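/* A worked example, assuming a 32-bit mode, P == 8, UNSIGNEDP == 0 and
no MASK: C == 0xff (the 8-bit value -1 zero-extended) is not
sign-extended to full width, so the result is 0xffffffff with the
extra bits set; C == 0x7f is already sign-extended and comes back
unchanged, with the extra bits zero. */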
5072 static tree
5073 unextend (tree c, int p, int unsignedp, tree mask)
5075 tree type = TREE_TYPE (c);
5076 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5077 tree temp;
5079 if (p == modesize || unsignedp)
5080 return c;
5082 /* We work by getting just the sign bit into the low-order bit, then
5083 into the high-order bit, then sign-extend. We then XOR that value
5084 with C. */
5085 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5086 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5088 /* We must use a signed type in order to get an arithmetic right shift.
5089 However, we must also avoid introducing accidental overflows, so that
5090 a subsequent call to integer_zerop will work. Hence we must
5091 do the type conversion here. At this point, the constant is either
5092 zero or one, and the conversion to a signed type can never overflow.
5093 We could get an overflow if this conversion is done anywhere else. */
5094 if (TYPE_UNSIGNED (type))
5095 temp = fold_convert (signed_type_for (type), temp);
5097 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5098 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5099 if (mask != 0)
5100 temp = const_binop (BIT_AND_EXPR, temp,
5101 fold_convert (TREE_TYPE (c), mask));
5102 /* If necessary, convert the type back to match the type of C. */
5103 if (TYPE_UNSIGNED (type))
5104 temp = fold_convert (type, temp);
5106 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5109 /* For an expression that has the form
5110 (A && B) || ~B
5111 or
5112 (A || B) && ~B,
5113 we can drop one of the inner expressions and simplify to
5114 A || ~B
5115 or
5116 A && ~B
5117 LOC is the location of the resulting expression. OP is the inner
5118 logical operation; the left-hand side in the examples above, while CMPOP
5119 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5120 removing a condition that guards another, as in
5121 (A != NULL && A->...) || A == NULL
5122 which we must not transform. If RHS_ONLY is true, only eliminate the
5123 right-most operand of the inner logical operation. */
5125 static tree
5126 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5127 bool rhs_only)
5129 tree type = TREE_TYPE (cmpop);
5130 enum tree_code code = TREE_CODE (cmpop);
5131 enum tree_code truthop_code = TREE_CODE (op);
5132 tree lhs = TREE_OPERAND (op, 0);
5133 tree rhs = TREE_OPERAND (op, 1);
5134 tree orig_lhs = lhs, orig_rhs = rhs;
5135 enum tree_code rhs_code = TREE_CODE (rhs);
5136 enum tree_code lhs_code = TREE_CODE (lhs);
5137 enum tree_code inv_code;
5139 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5140 return NULL_TREE;
5142 if (TREE_CODE_CLASS (code) != tcc_comparison)
5143 return NULL_TREE;
5145 if (rhs_code == truthop_code)
5147 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5148 if (newrhs != NULL_TREE)
5150 rhs = newrhs;
5151 rhs_code = TREE_CODE (rhs);
5154 if (lhs_code == truthop_code && !rhs_only)
5156 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5157 if (newlhs != NULL_TREE)
5159 lhs = newlhs;
5160 lhs_code = TREE_CODE (lhs);
5164 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5165 if (inv_code == rhs_code
5166 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5167 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5168 return lhs;
5169 if (!rhs_only && inv_code == lhs_code
5170 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5171 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5172 return rhs;
5173 if (rhs != orig_rhs || lhs != orig_lhs)
5174 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5175 lhs, rhs);
5176 return NULL_TREE;
5179 /* Find ways of folding logical expressions of LHS and RHS:
5180 Try to merge two comparisons to the same innermost item.
5181 Look for range tests like "ch >= '0' && ch <= '9'".
5182 Look for combinations of simple terms on machines with expensive branches
5183 and evaluate the RHS unconditionally.
5185 For example, if we have p->a == 2 && p->b == 4 and we can make an
5186 object large enough to span both A and B, we can do this with a comparison
5187 against the object ANDed with a mask.
5189 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5190 operations to do this with one comparison.
5192 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5193 function and the one above.
5195 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5196 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5198 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5199 two operands.
5201 We return the simplified tree or 0 if no optimization is possible. */
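/* An illustrative sketch (declaration hypothetical): given
struct s { unsigned a : 8; unsigned b : 8; } *p;
the test p->a == 2 && p->b == 4 can often be merged below into a
single comparison of one 16-bit word against the combined constant,
0x0402 on a typical little-endian target. */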
5203 static tree
5204 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5205 tree lhs, tree rhs)
5207 /* If this is the "or" of two comparisons, we can do something if
5208 the comparisons are NE_EXPR. If this is the "and", we can do something
5209 if the comparisons are EQ_EXPR. I.e.,
5210 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5212 WANTED_CODE is this operation code. For single bit fields, we can
5213 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5214 comparison for one-bit fields. */
5216 enum tree_code wanted_code;
5217 enum tree_code lcode, rcode;
5218 tree ll_arg, lr_arg, rl_arg, rr_arg;
5219 tree ll_inner, lr_inner, rl_inner, rr_inner;
5220 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5221 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5222 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5223 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5224 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5225 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5226 enum machine_mode lnmode, rnmode;
5227 tree ll_mask, lr_mask, rl_mask, rr_mask;
5228 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5229 tree l_const, r_const;
5230 tree lntype, rntype, result;
5231 HOST_WIDE_INT first_bit, end_bit;
5232 int volatilep;
5234 /* Start by getting the comparison codes. Fail if anything is volatile.
5235 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5236 it were surrounded with a NE_EXPR. */
5238 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5239 return 0;
5241 lcode = TREE_CODE (lhs);
5242 rcode = TREE_CODE (rhs);
5244 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5246 lhs = build2 (NE_EXPR, truth_type, lhs,
5247 build_int_cst (TREE_TYPE (lhs), 0));
5248 lcode = NE_EXPR;
5251 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5253 rhs = build2 (NE_EXPR, truth_type, rhs,
5254 build_int_cst (TREE_TYPE (rhs), 0));
5255 rcode = NE_EXPR;
5258 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5259 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5260 return 0;
5262 ll_arg = TREE_OPERAND (lhs, 0);
5263 lr_arg = TREE_OPERAND (lhs, 1);
5264 rl_arg = TREE_OPERAND (rhs, 0);
5265 rr_arg = TREE_OPERAND (rhs, 1);
5267 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5268 if (simple_operand_p (ll_arg)
5269 && simple_operand_p (lr_arg))
5271 if (operand_equal_p (ll_arg, rl_arg, 0)
5272 && operand_equal_p (lr_arg, rr_arg, 0))
5274 result = combine_comparisons (loc, code, lcode, rcode,
5275 truth_type, ll_arg, lr_arg);
5276 if (result)
5277 return result;
5279 else if (operand_equal_p (ll_arg, rr_arg, 0)
5280 && operand_equal_p (lr_arg, rl_arg, 0))
5282 result = combine_comparisons (loc, code, lcode,
5283 swap_tree_comparison (rcode),
5284 truth_type, ll_arg, lr_arg);
5285 if (result)
5286 return result;
5290 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5291 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5293 /* If the RHS can be evaluated unconditionally and its operands are
5294 simple, it wins to evaluate the RHS unconditionally on machines
5295 with expensive branches. In this case, this isn't a comparison
5296 that can be merged. */
5298 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5299 false) >= 2
5300 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5301 && simple_operand_p (rl_arg)
5302 && simple_operand_p (rr_arg))
5304 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5305 if (code == TRUTH_OR_EXPR
5306 && lcode == NE_EXPR && integer_zerop (lr_arg)
5307 && rcode == NE_EXPR && integer_zerop (rr_arg)
5308 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5309 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5310 return build2_loc (loc, NE_EXPR, truth_type,
5311 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5312 ll_arg, rl_arg),
5313 build_int_cst (TREE_TYPE (ll_arg), 0));
5315 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5316 if (code == TRUTH_AND_EXPR
5317 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5318 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5319 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5320 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5321 return build2_loc (loc, EQ_EXPR, truth_type,
5322 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5323 ll_arg, rl_arg),
5324 build_int_cst (TREE_TYPE (ll_arg), 0));
5327 /* See if the comparisons can be merged. Then get all the parameters for
5328 each side. */
5330 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5331 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5332 return 0;
5334 volatilep = 0;
5335 ll_inner = decode_field_reference (loc, ll_arg,
5336 &ll_bitsize, &ll_bitpos, &ll_mode,
5337 &ll_unsignedp, &volatilep, &ll_mask,
5338 &ll_and_mask);
5339 lr_inner = decode_field_reference (loc, lr_arg,
5340 &lr_bitsize, &lr_bitpos, &lr_mode,
5341 &lr_unsignedp, &volatilep, &lr_mask,
5342 &lr_and_mask);
5343 rl_inner = decode_field_reference (loc, rl_arg,
5344 &rl_bitsize, &rl_bitpos, &rl_mode,
5345 &rl_unsignedp, &volatilep, &rl_mask,
5346 &rl_and_mask);
5347 rr_inner = decode_field_reference (loc, rr_arg,
5348 &rr_bitsize, &rr_bitpos, &rr_mode,
5349 &rr_unsignedp, &volatilep, &rr_mask,
5350 &rr_and_mask);
5352 /* The inner operation on the lhs of each comparison must be the
5353 same if we are to be able to do anything.
5354 Then see if we have constants. If not, the same must be true for
5355 the rhs's. */
5356 if (volatilep || ll_inner == 0 || rl_inner == 0
5357 || ! operand_equal_p (ll_inner, rl_inner, 0))
5358 return 0;
5360 if (TREE_CODE (lr_arg) == INTEGER_CST
5361 && TREE_CODE (rr_arg) == INTEGER_CST)
5362 l_const = lr_arg, r_const = rr_arg;
5363 else if (lr_inner == 0 || rr_inner == 0
5364 || ! operand_equal_p (lr_inner, rr_inner, 0))
5365 return 0;
5366 else
5367 l_const = r_const = 0;
5369 /* If either comparison code is not correct for our logical operation,
5370 fail. However, we can convert a one-bit comparison against zero into
5371 the opposite comparison against that bit being set in the field. */
5373 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5374 if (lcode != wanted_code)
5376 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5378 /* Make the left operand unsigned, since we are only interested
5379 in the value of one bit. Otherwise we are doing the wrong
5380 thing below. */
5381 ll_unsignedp = 1;
5382 l_const = ll_mask;
5384 else
5385 return 0;
5388 /* This is analogous to the code for l_const above. */
5389 if (rcode != wanted_code)
5391 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5393 rl_unsignedp = 1;
5394 r_const = rl_mask;
5396 else
5397 return 0;
5400 /* See if we can find a mode that contains both fields being compared on
5401 the left. If we can't, fail. Otherwise, update all constants and masks
5402 to be relative to a field of that size. */
5403 first_bit = MIN (ll_bitpos, rl_bitpos);
5404 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5405 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5406 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5407 volatilep);
5408 if (lnmode == VOIDmode)
5409 return 0;
5411 lnbitsize = GET_MODE_BITSIZE (lnmode);
5412 lnbitpos = first_bit & ~ (lnbitsize - 1);
5413 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5414 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5416 if (BYTES_BIG_ENDIAN)
5418 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5419 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5422 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5423 size_int (xll_bitpos));
5424 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5425 size_int (xrl_bitpos));
5427 if (l_const)
5429 l_const = fold_convert_loc (loc, lntype, l_const);
5430 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5431 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5432 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5433 fold_build1_loc (loc, BIT_NOT_EXPR,
5434 lntype, ll_mask))))
5436 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5438 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5441 if (r_const)
5443 r_const = fold_convert_loc (loc, lntype, r_const);
5444 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5445 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5446 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5447 fold_build1_loc (loc, BIT_NOT_EXPR,
5448 lntype, rl_mask))))
5450 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5452 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5456 /* If the right sides are not constant, do the same for them. Also,
5457 disallow this optimization if a size or signedness mismatch occurs
5458 between the left and right sides. */
5459 if (l_const == 0)
5461 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5462 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5463 /* Make sure the two fields on the right
5464 correspond to the left without being swapped. */
5465 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5466 return 0;
5468 first_bit = MIN (lr_bitpos, rr_bitpos);
5469 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5470 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5471 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5472 volatilep);
5473 if (rnmode == VOIDmode)
5474 return 0;
5476 rnbitsize = GET_MODE_BITSIZE (rnmode);
5477 rnbitpos = first_bit & ~ (rnbitsize - 1);
5478 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5479 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5481 if (BYTES_BIG_ENDIAN)
5483 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5484 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5487 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5488 rntype, lr_mask),
5489 size_int (xlr_bitpos));
5490 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5491 rntype, rr_mask),
5492 size_int (xrr_bitpos));
5494 /* Make a mask that corresponds to both fields being compared.
5495 Do this for both items being compared. If the operands are the
5496 same size and the bits being compared are in the same position
5497 then we can do this by masking both and comparing the masked
5498 results. */
5499 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5500 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5501 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5503 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5504 ll_unsignedp || rl_unsignedp);
5505 if (! all_ones_mask_p (ll_mask, lnbitsize))
5506 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5508 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5509 lr_unsignedp || rr_unsignedp);
5510 if (! all_ones_mask_p (lr_mask, rnbitsize))
5511 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5513 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5516 /* There is still another way we can do something: If both pairs of
5517 fields being compared are adjacent, we may be able to make a wider
5518 field containing them both.
5520 Note that we still must mask the lhs/rhs expressions. Furthermore,
5521 the mask must be shifted to account for the shift done by
5522 make_bit_field_ref. */
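/* For instance (a hypothetical layout, ignoring endianness issues):
   with struct S { unsigned a : 8; unsigned b : 8; } s, t;
   the test s.a == t.a && s.b == t.b compares two pairs of adjacent
   fields, so each side can be fetched as a single 16-bit bit-field
   reference and the whole test done with one EQ_EXPR.  */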
5523 if ((ll_bitsize + ll_bitpos == rl_bitpos
5524 && lr_bitsize + lr_bitpos == rr_bitpos)
5525 || (ll_bitpos == rl_bitpos + rl_bitsize
5526 && lr_bitpos == rr_bitpos + rr_bitsize))
5528 tree type;
5530 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5531 ll_bitsize + rl_bitsize,
5532 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5533 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5534 lr_bitsize + rr_bitsize,
5535 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5537 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5538 size_int (MIN (xll_bitpos, xrl_bitpos)));
5539 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5540 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5542 /* Convert to the smaller type before masking out unwanted bits. */
5543 type = lntype;
5544 if (lntype != rntype)
5546 if (lnbitsize > rnbitsize)
5548 lhs = fold_convert_loc (loc, rntype, lhs);
5549 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5550 type = rntype;
5552 else if (lnbitsize < rnbitsize)
5554 rhs = fold_convert_loc (loc, lntype, rhs);
5555 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5556 type = lntype;
5560 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5561 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5563 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5564 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5566 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5569 return 0;
5572 /* Handle the case of comparisons with constants. If there is something in
5573 common between the masks, those bits of the constants must be the same.
5574 If not, the condition is always false. Test for this to avoid generating
5575 incorrect code below. */
5576 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5577 if (! integer_zerop (result)
5578 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5579 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5581 if (wanted_code == NE_EXPR)
5583 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5584 return constant_boolean_node (true, truth_type);
5586 else
5588 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5589 return constant_boolean_node (false, truth_type);
5593 /* Construct the expression we will return. First get the component
5594 reference we will make. Unless the mask is all ones for the width of
5595 that field, perform the mask operation. Then compare with the
5596 merged constant. */
5597 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5598 ll_unsignedp || rl_unsignedp);
5600 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5601 if (! all_ones_mask_p (ll_mask, lnbitsize))
5602 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5604 return build2_loc (loc, wanted_code, truth_type, result,
5605 const_binop (BIT_IOR_EXPR, l_const, r_const));
5608 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5609 constant. */
5611 static tree
5612 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5613 tree op0, tree op1)
5615 tree arg0 = op0;
5616 enum tree_code op_code;
5617 tree comp_const;
5618 tree minmax_const;
5619 int consts_equal, consts_lt;
5620 tree inner;
5622 STRIP_SIGN_NOPS (arg0);
5624 op_code = TREE_CODE (arg0);
5625 minmax_const = TREE_OPERAND (arg0, 1);
5626 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5627 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5628 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5629 inner = TREE_OPERAND (arg0, 0);
5631 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5632 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5633 || TREE_CODE (comp_const) != INTEGER_CST
5634 || TREE_OVERFLOW (comp_const)
5635 || TREE_CODE (minmax_const) != INTEGER_CST
5636 || TREE_OVERFLOW (minmax_const))
5637 return NULL_TREE;
5639 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5640 and GT_EXPR, doing the rest with recursive calls using logical
5641 simplifications. */
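/* For instance, MIN (X, 10) <= 5 is first rewritten as
   ! (MIN (X, 10) > 5); the GT_EXPR case below folds the inner
   comparison to X > 5, so the original expression becomes X <= 5.  */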
5642 switch (code)
5644 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5646 tree tem
5647 = optimize_minmax_comparison (loc,
5648 invert_tree_comparison (code, false),
5649 type, op0, op1);
5650 if (tem)
5651 return invert_truthvalue_loc (loc, tem);
5652 return NULL_TREE;
5655 case GE_EXPR:
5656 return
5657 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5658 optimize_minmax_comparison
5659 (loc, EQ_EXPR, type, arg0, comp_const),
5660 optimize_minmax_comparison
5661 (loc, GT_EXPR, type, arg0, comp_const));
5663 case EQ_EXPR:
5664 if (op_code == MAX_EXPR && consts_equal)
5665 /* MAX (X, 0) == 0 -> X <= 0 */
5666 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5668 else if (op_code == MAX_EXPR && consts_lt)
5669 /* MAX (X, 0) == 5 -> X == 5 */
5670 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5672 else if (op_code == MAX_EXPR)
5673 /* MAX (X, 0) == -1 -> false */
5674 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5676 else if (consts_equal)
5677 /* MIN (X, 0) == 0 -> X >= 0 */
5678 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5680 else if (consts_lt)
5681 /* MIN (X, 0) == 5 -> false */
5682 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5684 else
5685 /* MIN (X, 0) == -1 -> X == -1 */
5686 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5688 case GT_EXPR:
5689 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5690 /* MAX (X, 0) > 0 -> X > 0
5691 MAX (X, 0) > 5 -> X > 5 */
5692 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5694 else if (op_code == MAX_EXPR)
5695 /* MAX (X, 0) > -1 -> true */
5696 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5698 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5699 /* MIN (X, 0) > 0 -> false
5700 MIN (X, 0) > 5 -> false */
5701 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5703 else
5704 /* MIN (X, 0) > -1 -> X > -1 */
5705 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5707 default:
5708 return NULL_TREE;
5712 /* T is an integer expression that is being multiplied, divided, or taken a
5713 modulus (CODE says which and what kind of divide or modulus) by a
5714 constant C. See if we can eliminate that operation by folding it with
5715 other operations already in T. WIDE_TYPE, if non-null, is a type that
5716 should be used for the computation if wider than our type.
5718 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5719 (X * 2) + (Y * 4). We must, however, be assured that either the original
5720 expression would not overflow or that overflow is undefined for the type
5721 in the language in question.
5723 If we return a non-null expression, it is an equivalent form of the
5724 original computation, but need not be in the original type.
5726 We set *STRICT_OVERFLOW_P to true if the return value depends on
5727 signed overflow being undefined. Otherwise we do not change
5728 *STRICT_OVERFLOW_P. */
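/* A minimal usage sketch, with hypothetical operands: if T is
   (X * 8) + 4 and C is 4, both in a signed type where overflow is
   undefined, then

     bool sov = false;
     tree r = extract_muldiv (t, c, TRUNC_DIV_EXPR, NULL_TREE, &sov);

   yields (X * 2) + 1 and sets SOV, since distributing the division
   relies on X * 8 not overflowing.  */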
5730 static tree
5731 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5732 bool *strict_overflow_p)
5734 /* To avoid exponential search depth, refuse to allow recursion past
5735 three levels. Beyond that (1) it's highly unlikely that we'll find
5736 something interesting and (2) we've probably processed it before
5737 when we built the inner expression. */
5739 static int depth;
5740 tree ret;
5742 if (depth > 3)
5743 return NULL;
5745 depth++;
5746 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5747 depth--;
5749 return ret;
5752 static tree
5753 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5754 bool *strict_overflow_p)
5756 tree type = TREE_TYPE (t);
5757 enum tree_code tcode = TREE_CODE (t);
5758 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5759 > GET_MODE_SIZE (TYPE_MODE (type)))
5760 ? wide_type : type);
5761 tree t1, t2;
5762 int same_p = tcode == code;
5763 tree op0 = NULL_TREE, op1 = NULL_TREE;
5764 bool sub_strict_overflow_p;
5766 /* Don't deal with constants of zero here; they confuse the code below. */
5767 if (integer_zerop (c))
5768 return NULL_TREE;
5770 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5771 op0 = TREE_OPERAND (t, 0);
5773 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5774 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5776 /* Note that we need not handle conditional operations here since fold
5777 already handles those cases. So just do arithmetic here. */
5778 switch (tcode)
5780 case INTEGER_CST:
5781 /* For a constant, we can always simplify if we are a multiply
5782 or (for divide and modulus) if it is a multiple of our constant. */
5783 if (code == MULT_EXPR
5784 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5785 return const_binop (code, fold_convert (ctype, t),
5786 fold_convert (ctype, c));
5787 break;
5789 CASE_CONVERT: case NON_LVALUE_EXPR:
5790 /* If op0 is an expression ... */
5791 if ((COMPARISON_CLASS_P (op0)
5792 || UNARY_CLASS_P (op0)
5793 || BINARY_CLASS_P (op0)
5794 || VL_EXP_CLASS_P (op0)
5795 || EXPRESSION_CLASS_P (op0))
5796 /* ... and has wrapping overflow, and its type is smaller
5797 than ctype, then we cannot pass through as widening. */
5798 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5799 && (TYPE_PRECISION (ctype)
5800 > TYPE_PRECISION (TREE_TYPE (op0))))
5801 /* ... or this is a truncation (t is narrower than op0),
5802 then we cannot pass through this narrowing. */
5803 || (TYPE_PRECISION (type)
5804 < TYPE_PRECISION (TREE_TYPE (op0)))
5805 /* ... or signedness changes for division or modulus,
5806 then we cannot pass through this conversion. */
5807 || (code != MULT_EXPR
5808 && (TYPE_UNSIGNED (ctype)
5809 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5810 /* ... or has undefined overflow while the type converted to
5811 has not, we cannot do the operation in the inner type
5812 as that would introduce undefined overflow. */
5813 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5814 && !TYPE_OVERFLOW_UNDEFINED (type))))
5815 break;
5817 /* Pass the constant down and see if we can make a simplification. If
5818 we can, replace this expression with the inner simplification for
5819 possible later conversion to our or some other type. */
5820 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5821 && TREE_CODE (t2) == INTEGER_CST
5822 && !TREE_OVERFLOW (t2)
5823 && (0 != (t1 = extract_muldiv (op0, t2, code,
5824 code == MULT_EXPR
5825 ? ctype : NULL_TREE,
5826 strict_overflow_p))))
5827 return t1;
5828 break;
5830 case ABS_EXPR:
5831 /* If widening the type changes it from signed to unsigned, then we
5832 must avoid building ABS_EXPR itself as unsigned. */
5833 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5835 tree cstype = (*signed_type_for) (ctype);
5836 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5837 != 0)
5839 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5840 return fold_convert (ctype, t1);
5842 break;
5844 /* If the constant is negative, we cannot simplify this. */
5845 if (tree_int_cst_sgn (c) == -1)
5846 break;
5847 /* FALLTHROUGH */
5848 case NEGATE_EXPR:
5849 /* For division and modulus, type can't be unsigned, as e.g.
5850 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5851 For signed types, even with wrapping overflow, this is fine. */
5852 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5853 break;
5854 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5855 != 0)
5856 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5857 break;
5859 case MIN_EXPR: case MAX_EXPR:
5860 /* If widening the type changes the signedness, then we can't perform
5861 this optimization as that changes the result. */
5862 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5863 break;
5865 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5866 sub_strict_overflow_p = false;
5867 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5868 &sub_strict_overflow_p)) != 0
5869 && (t2 = extract_muldiv (op1, c, code, wide_type,
5870 &sub_strict_overflow_p)) != 0)
5872 if (tree_int_cst_sgn (c) < 0)
5873 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5874 if (sub_strict_overflow_p)
5875 *strict_overflow_p = true;
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 fold_convert (ctype, t2));
5879 break;
5881 case LSHIFT_EXPR: case RSHIFT_EXPR:
5882 /* If the second operand is constant, this is a multiplication
5883 or floor division by a power of two, so we can treat it that
5884 way unless the multiplier or divisor overflows. Signed
5885 left-shift overflow is implementation-defined rather than
5886 undefined in C90, so do not convert signed left shift into
5887 multiplication. */
5888 if (TREE_CODE (op1) == INTEGER_CST
5889 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5890 /* const_binop may not detect overflow correctly,
5891 so check for it explicitly here. */
5892 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5893 && TREE_INT_CST_HIGH (op1) == 0
5894 && 0 != (t1 = fold_convert (ctype,
5895 const_binop (LSHIFT_EXPR,
5896 size_one_node,
5897 op1)))
5898 && !TREE_OVERFLOW (t1))
5899 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5900 ? MULT_EXPR : FLOOR_DIV_EXPR,
5901 ctype,
5902 fold_convert (ctype, op0),
5903 t1),
5904 c, code, wide_type, strict_overflow_p);
5905 break;
5907 case PLUS_EXPR: case MINUS_EXPR:
5908 /* See if we can eliminate the operation on both sides. If we can, we
5909 can return a new PLUS or MINUS. If we can't, the only remaining
5910 cases where we can do anything are if the second operand is a
5911 constant. */
5912 sub_strict_overflow_p = false;
5913 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5914 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5915 if (t1 != 0 && t2 != 0
5916 && (code == MULT_EXPR
5917 /* If not multiplication, we can only do this if both operands
5918 are divisible by c. */
5919 || (multiple_of_p (ctype, op0, c)
5920 && multiple_of_p (ctype, op1, c))))
5922 if (sub_strict_overflow_p)
5923 *strict_overflow_p = true;
5924 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5925 fold_convert (ctype, t2));
5928 /* If this was a subtraction, negate OP1 and set it to be an addition.
5929 This simplifies the logic below. */
5930 if (tcode == MINUS_EXPR)
5932 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5933 /* If OP1 was not easily negatable, the constant may be OP0. */
5934 if (TREE_CODE (op0) == INTEGER_CST)
5936 tree tem = op0;
5937 op0 = op1;
5938 op1 = tem;
5939 tem = t1;
5940 t1 = t2;
5941 t2 = tem;
5945 if (TREE_CODE (op1) != INTEGER_CST)
5946 break;
5948 /* If either OP1 or C are negative, this optimization is not safe for
5949 some of the division and remainder types while for others we need
5950 to change the code. */
5951 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5953 if (code == CEIL_DIV_EXPR)
5954 code = FLOOR_DIV_EXPR;
5955 else if (code == FLOOR_DIV_EXPR)
5956 code = CEIL_DIV_EXPR;
5957 else if (code != MULT_EXPR
5958 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5959 break;
5962 /* If it's a multiply or a division/modulus operation of a multiple
5963 of our constant, do the operation and verify it doesn't overflow. */
5964 if (code == MULT_EXPR
5965 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5967 op1 = const_binop (code, fold_convert (ctype, op1),
5968 fold_convert (ctype, c));
5969 /* We allow the constant to overflow with wrapping semantics. */
5970 if (op1 == 0
5971 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5972 break;
5974 else
5975 break;
5977 /* If we have an unsigned type, we cannot widen the operation since it
5978 will change the result if the original computation overflowed. */
5979 if (TYPE_UNSIGNED (ctype) && ctype != type)
5980 break;
5982 /* If we were able to eliminate our operation from the first side,
5983 apply our operation to the second side and reform the PLUS. */
5984 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5985 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5987 /* The last case is if we are a multiply. In that case, we can
5988 apply the distributive law to commute the multiply and addition
5989 if the multiplication of the constants doesn't overflow
5990 and overflow is defined. With undefined overflow
5991 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5992 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5993 return fold_build2 (tcode, ctype,
5994 fold_build2 (code, ctype,
5995 fold_convert (ctype, op0),
5996 fold_convert (ctype, c)),
5997 op1);
5999 break;
6001 case MULT_EXPR:
6002 /* We have a special case here if we are doing something like
6003 (C * 8) % 4 since we know that's zero. */
6004 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6005 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6006 /* If the multiplication can overflow we cannot optimize this. */
6007 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6008 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6009 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6011 *strict_overflow_p = true;
6012 return omit_one_operand (type, integer_zero_node, op0);
6015 /* ... fall through ... */
6017 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6018 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6019 /* If we can extract our operation from the LHS, do so and return a
6020 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6021 do something only if the second operand is a constant. */
6022 if (same_p
6023 && (t1 = extract_muldiv (op0, c, code, wide_type,
6024 strict_overflow_p)) != 0)
6025 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6026 fold_convert (ctype, op1));
6027 else if (tcode == MULT_EXPR && code == MULT_EXPR
6028 && (t1 = extract_muldiv (op1, c, code, wide_type,
6029 strict_overflow_p)) != 0)
6030 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6031 fold_convert (ctype, t1));
6032 else if (TREE_CODE (op1) != INTEGER_CST)
6033 return 0;
6035 /* If these are the same operation types, we can associate them
6036 assuming no overflow. */
6037 if (tcode == code)
6039 double_int mul;
6040 bool overflow_p;
6041 unsigned prec = TYPE_PRECISION (ctype);
6042 bool uns = TYPE_UNSIGNED (ctype);
6043 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6044 double_int dic = tree_to_double_int (c).ext (prec, uns);
6045 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6046 overflow_p = ((!uns && overflow_p)
6047 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6048 if (!double_int_fits_to_tree_p (ctype, mul)
6049 && ((uns && tcode != MULT_EXPR) || !uns))
6050 overflow_p = 1;
6051 if (!overflow_p)
6052 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6053 double_int_to_tree (ctype, mul));
6056 /* If these operations "cancel" each other, we have the main
6057 optimizations of this pass, which occur when either constant is a
6058 multiple of the other, in which case we replace this with either an
6059 operation of CODE or TCODE.
6061 If we have an unsigned type, we cannot do this since it will change
6062 the result if the original computation overflowed. */
6063 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6064 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6065 || (tcode == MULT_EXPR
6066 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6067 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6068 && code != MULT_EXPR)))
6070 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6072 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6073 *strict_overflow_p = true;
6074 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6075 fold_convert (ctype,
6076 const_binop (TRUNC_DIV_EXPR,
6077 op1, c)));
6079 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6081 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6082 *strict_overflow_p = true;
6083 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6084 fold_convert (ctype,
6085 const_binop (TRUNC_DIV_EXPR,
6086 c, op1)));
6089 break;
6091 default:
6092 break;
6095 return 0;
6098 /* Return a node which has the indicated constant VALUE (either 0 or
6099 1 for scalars or {-1,-1,...} or {0,0,...} for vectors),
6100 and is of the indicated TYPE. */
6102 tree
6103 constant_boolean_node (bool value, tree type)
6105 if (type == integer_type_node)
6106 return value ? integer_one_node : integer_zero_node;
6107 else if (type == boolean_type_node)
6108 return value ? boolean_true_node : boolean_false_node;
6109 else if (TREE_CODE (type) == VECTOR_TYPE)
6110 return build_vector_from_val (type,
6111 build_int_cst (TREE_TYPE (type),
6112 value ? -1 : 0));
6113 else
6114 return fold_convert (type, value ? integer_one_node : integer_zero_node);
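/* For example, constant_boolean_node (true, boolean_type_node) is
   boolean_true_node, while for a four-element vector type it builds
   the all-ones vector { -1, -1, -1, -1 }.  */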
6118 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6119 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6120 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6121 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6122 COND is the first argument to CODE; otherwise (as in the example
6123 given here), it is the second argument. TYPE is the type of the
6124 original expression. Return NULL_TREE if no simplification is
6125 possible. */
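/* For instance, with CODE == PLUS_EXPR, ARG == 10 and
   COND == (b ? 1 : 2), pushing the addition inside gives
   b ? 11 : 12; both branches simplify to constants, so the
   transformation is considered worthwhile.  */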
6127 static tree
6128 fold_binary_op_with_conditional_arg (location_t loc,
6129 enum tree_code code,
6130 tree type, tree op0, tree op1,
6131 tree cond, tree arg, int cond_first_p)
6133 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6134 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6135 tree test, true_value, false_value;
6136 tree lhs = NULL_TREE;
6137 tree rhs = NULL_TREE;
6138 enum tree_code cond_code = COND_EXPR;
6140 if (TREE_CODE (cond) == COND_EXPR
6141 || TREE_CODE (cond) == VEC_COND_EXPR)
6143 test = TREE_OPERAND (cond, 0);
6144 true_value = TREE_OPERAND (cond, 1);
6145 false_value = TREE_OPERAND (cond, 2);
6146 /* If this operand throws an exception, then it does not make
6147 sense to try to perform a logical or arithmetic operation
6148 involving it. */
6149 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6150 lhs = true_value;
6151 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6152 rhs = false_value;
6154 else
6156 tree testtype = TREE_TYPE (cond);
6157 test = cond;
6158 true_value = constant_boolean_node (true, testtype);
6159 false_value = constant_boolean_node (false, testtype);
6162 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6163 cond_code = VEC_COND_EXPR;
6165 /* This transformation is only worthwhile if we don't have to wrap ARG
6166 in a SAVE_EXPR and the operation can be simplified without recursing
6167 on at least one of the branches once it's pushed inside the COND_EXPR. */
6168 if (!TREE_CONSTANT (arg)
6169 && (TREE_SIDE_EFFECTS (arg)
6170 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6171 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6172 return NULL_TREE;
6174 arg = fold_convert_loc (loc, arg_type, arg);
6175 if (lhs == 0)
6177 true_value = fold_convert_loc (loc, cond_type, true_value);
6178 if (cond_first_p)
6179 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6180 else
6181 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6183 if (rhs == 0)
6185 false_value = fold_convert_loc (loc, cond_type, false_value);
6186 if (cond_first_p)
6187 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6188 else
6189 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6192 /* Check that we have simplified at least one of the branches. */
6193 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6194 return NULL_TREE;
6196 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6200 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6202 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6203 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6204 ADDEND is the same as X.
6206 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6207 and finite. The problematic cases are when X is zero, and its mode
6208 has signed zeros. In the case of rounding towards -infinity,
6209 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6210 modes, X + 0 is not the same as X because -0 + 0 is 0. */
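/* For instance, when signed zeros are not honored, X + 0.0 -> X is
   always valid.  When they are honored, X + 0.0 must be kept: if X
   is -0.0, then -0.0 + 0.0 is +0.0 under the default rounding mode,
   so this function returns false for that case.  */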
6212 bool
6213 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6215 if (!real_zerop (addend))
6216 return false;
6218 /* Don't allow the fold with -fsignaling-nans. */
6219 if (HONOR_SNANS (TYPE_MODE (type)))
6220 return false;
6222 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6223 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6224 return true;
6226 /* In a vector or complex, we would need to check the sign of all zeros. */
6227 if (TREE_CODE (addend) != REAL_CST)
6228 return false;
6230 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6231 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6232 negate = !negate;
6234 /* The mode has signed zeros, and we have to honor their sign.
6235 In this situation, there is only one case we can return true for.
6236 X - 0 is the same as X unless rounding towards -infinity is
6237 supported. */
6238 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6241 /* Subroutine of fold() that checks comparisons of built-in math
6242 functions against real constants.
6244 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6245 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6246 is the type of the result and ARG0 and ARG1 are the operands of the
6247 comparison. ARG1 must be a TREE_REAL_CST.
6249 The function returns the constant folded tree if a simplification
6250 can be made, and NULL_TREE otherwise. */
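/* For example, with FCODE == BUILT_IN_SQRT and CODE == GT_EXPR,
   sqrt (x) > 3.0 folds to x > 9.0, while sqrt (x) > -1.0 folds to
   x >= 0.0 when NaNs are honored (and to constant true when they
   are not).  */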
6252 static tree
6253 fold_mathfn_compare (location_t loc,
6254 enum built_in_function fcode, enum tree_code code,
6255 tree type, tree arg0, tree arg1)
6257 REAL_VALUE_TYPE c;
6259 if (BUILTIN_SQRT_P (fcode))
6261 tree arg = CALL_EXPR_ARG (arg0, 0);
6262 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6264 c = TREE_REAL_CST (arg1);
6265 if (REAL_VALUE_NEGATIVE (c))
6267 /* sqrt(x) < y is always false, if y is negative. */
6268 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6269 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6271 /* sqrt(x) > y is always true, if y is negative and we
6272 don't care about NaNs, i.e. negative values of x. */
6273 if (code == NE_EXPR || !HONOR_NANS (mode))
6274 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6276 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6277 return fold_build2_loc (loc, GE_EXPR, type, arg,
6278 build_real (TREE_TYPE (arg), dconst0));
6280 else if (code == GT_EXPR || code == GE_EXPR)
6282 REAL_VALUE_TYPE c2;
6284 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6285 real_convert (&c2, mode, &c2);
6287 if (REAL_VALUE_ISINF (c2))
6289 /* sqrt(x) > y is x == +Inf, when y is very large. */
6290 if (HONOR_INFINITIES (mode))
6291 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6292 build_real (TREE_TYPE (arg), c2));
6294 /* sqrt(x) > y is always false, when y is very large
6295 and we don't care about infinities. */
6296 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6299 /* sqrt(x) > c is the same as x > c*c. */
6300 return fold_build2_loc (loc, code, type, arg,
6301 build_real (TREE_TYPE (arg), c2));
6303 else if (code == LT_EXPR || code == LE_EXPR)
6305 REAL_VALUE_TYPE c2;
6307 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6308 real_convert (&c2, mode, &c2);
6310 if (REAL_VALUE_ISINF (c2))
6312 /* sqrt(x) < y is always true, when y is a very large
6313 value and we don't care about NaNs or Infinities. */
6314 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6315 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6317 /* sqrt(x) < y is x != +Inf when y is very large and we
6318 don't care about NaNs. */
6319 if (! HONOR_NANS (mode))
6320 return fold_build2_loc (loc, NE_EXPR, type, arg,
6321 build_real (TREE_TYPE (arg), c2));
6323 /* sqrt(x) < y is x >= 0 when y is very large and we
6324 don't care about Infinities. */
6325 if (! HONOR_INFINITIES (mode))
6326 return fold_build2_loc (loc, GE_EXPR, type, arg,
6327 build_real (TREE_TYPE (arg), dconst0));
6329 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6330 arg = save_expr (arg);
6331 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6332 fold_build2_loc (loc, GE_EXPR, type, arg,
6333 build_real (TREE_TYPE (arg),
6334 dconst0)),
6335 fold_build2_loc (loc, NE_EXPR, type, arg,
6336 build_real (TREE_TYPE (arg),
6337 c2)));
6340 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6341 if (! HONOR_NANS (mode))
6342 return fold_build2_loc (loc, code, type, arg,
6343 build_real (TREE_TYPE (arg), c2));
6345 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6346 arg = save_expr (arg);
6347 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6348 fold_build2_loc (loc, GE_EXPR, type, arg,
6349 build_real (TREE_TYPE (arg),
6350 dconst0)),
6351 fold_build2_loc (loc, code, type, arg,
6352 build_real (TREE_TYPE (arg),
6353 c2)));
6357 return NULL_TREE;
6360 /* Subroutine of fold() that optimizes comparisons against Infinities,
6361 either +Inf or -Inf.
6363 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6364 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6365 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6367 The function returns the constant folded tree if a simplification
6368 can be made, and NULL_TREE otherwise. */
6370 static tree
6371 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6372 tree arg0, tree arg1)
6374 enum machine_mode mode;
6375 REAL_VALUE_TYPE max;
6376 tree temp;
6377 bool neg;
6379 mode = TYPE_MODE (TREE_TYPE (arg0));
6381 /* For negative infinity, swap the sense of the comparison. */
6382 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6383 if (neg)
6384 code = swap_tree_comparison (code);
6386 switch (code)
6388 case GT_EXPR:
6389 /* x > +Inf is always false, if we ignore sNaNs. */
6390 if (HONOR_SNANS (mode))
6391 return NULL_TREE;
6392 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6394 case LE_EXPR:
6395 /* x <= +Inf is always true, if we don't care about NaNs. */
6396 if (! HONOR_NANS (mode))
6397 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6399 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6400 arg0 = save_expr (arg0);
6401 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6403 case EQ_EXPR:
6404 case GE_EXPR:
6405 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6406 real_maxval (&max, neg, mode);
6407 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6408 arg0, build_real (TREE_TYPE (arg0), max));
6410 case LT_EXPR:
6411 /* x < +Inf is always equal to x <= DBL_MAX. */
6412 real_maxval (&max, neg, mode);
6413 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6414 arg0, build_real (TREE_TYPE (arg0), max));
6416 case NE_EXPR:
6417 /* x != +Inf is always equal to !(x > DBL_MAX). */
6418 real_maxval (&max, neg, mode);
6419 if (! HONOR_NANS (mode))
6420 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6421 arg0, build_real (TREE_TYPE (arg0), max));
6423 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6424 arg0, build_real (TREE_TYPE (arg0), max));
6425 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6427 default:
6428 break;
6431 return NULL_TREE;
6434 /* Subroutine of fold() that optimizes comparisons of a division by
6435 a nonzero integer constant against an integer constant, i.e.
6436 X/C1 op C2.
6438 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6439 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6440 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6442 The function returns the constant folded tree if a simplification
6443 can be made, and NULL_TREE otherwise. */
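/* A worked instance with unsigned X: for X / 4 == 3 we compute
   prod == 12 and tmp == 3, giving lo == 12 and hi == 15, so the
   comparison folds to the range check 12 <= X && X <= 15.  */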
6445 static tree
6446 fold_div_compare (location_t loc,
6447 enum tree_code code, tree type, tree arg0, tree arg1)
6449 tree prod, tmp, hi, lo;
6450 tree arg00 = TREE_OPERAND (arg0, 0);
6451 tree arg01 = TREE_OPERAND (arg0, 1);
6452 double_int val;
6453 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6454 bool neg_overflow;
6455 bool overflow;
6457 /* We have to do this the hard way to detect unsigned overflow.
6458 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6459 val = TREE_INT_CST (arg01)
6460 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6461 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6462 neg_overflow = false;
6464 if (unsigned_p)
6466 tmp = int_const_binop (MINUS_EXPR, arg01,
6467 build_int_cst (TREE_TYPE (arg01), 1));
6468 lo = prod;
6470 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6471 val = TREE_INT_CST (prod)
6472 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6473 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6474 -1, overflow | TREE_OVERFLOW (prod));
6476 else if (tree_int_cst_sgn (arg01) >= 0)
6478 tmp = int_const_binop (MINUS_EXPR, arg01,
6479 build_int_cst (TREE_TYPE (arg01), 1));
6480 switch (tree_int_cst_sgn (arg1))
6482 case -1:
6483 neg_overflow = true;
6484 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6485 hi = prod;
6486 break;
6488 case 0:
6489 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6490 hi = tmp;
6491 break;
6493 case 1:
6494 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6495 lo = prod;
6496 break;
6498 default:
6499 gcc_unreachable ();
6502 else
6504 /* A negative divisor reverses the relational operators. */
6505 code = swap_tree_comparison (code);
6507 tmp = int_const_binop (PLUS_EXPR, arg01,
6508 build_int_cst (TREE_TYPE (arg01), 1));
6509 switch (tree_int_cst_sgn (arg1))
6511 case -1:
6512 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6513 lo = prod;
6514 break;
6516 case 0:
6517 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6518 lo = tmp;
6519 break;
6521 case 1:
6522 neg_overflow = true;
6523 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6524 hi = prod;
6525 break;
6527 default:
6528 gcc_unreachable ();
6532 switch (code)
6534 case EQ_EXPR:
6535 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6536 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6537 if (TREE_OVERFLOW (hi))
6538 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6539 if (TREE_OVERFLOW (lo))
6540 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6541 return build_range_check (loc, type, arg00, 1, lo, hi);
6543 case NE_EXPR:
6544 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6545 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6546 if (TREE_OVERFLOW (hi))
6547 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6548 if (TREE_OVERFLOW (lo))
6549 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6550 return build_range_check (loc, type, arg00, 0, lo, hi);
6552 case LT_EXPR:
6553 if (TREE_OVERFLOW (lo))
6555 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6556 return omit_one_operand_loc (loc, type, tmp, arg00);
6558 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6560 case LE_EXPR:
6561 if (TREE_OVERFLOW (hi))
6563 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6564 return omit_one_operand_loc (loc, type, tmp, arg00);
6566 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6568 case GT_EXPR:
6569 if (TREE_OVERFLOW (hi))
6571 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6572 return omit_one_operand_loc (loc, type, tmp, arg00);
6574 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6576 case GE_EXPR:
6577 if (TREE_OVERFLOW (lo))
6579 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6580 return omit_one_operand_loc (loc, type, tmp, arg00);
6582 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6584 default:
6585 break;
6588 return NULL_TREE;
6592 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6593 equality/inequality test, then return a simplified form of the test
6594 using a sign test. Otherwise return NULL. TYPE is the desired
6595 result type. */
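/* For instance, with a 32-bit unsigned X, (X & 0x80000000) != 0
   tests exactly the sign bit, so it becomes (int) X < 0, and the
   corresponding == 0 form becomes (int) X >= 0.  */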
6597 static tree
6598 fold_single_bit_test_into_sign_test (location_t loc,
6599 enum tree_code code, tree arg0, tree arg1,
6600 tree result_type)
6602 /* If this is testing a single bit, we can optimize the test. */
6603 if ((code == NE_EXPR || code == EQ_EXPR)
6604 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6605 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6607 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6608 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6609 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6611 if (arg00 != NULL_TREE
6612 /* This is only a win if casting to a signed type is cheap,
6613 i.e. when arg00's type is not a partial mode. */
6614 && TYPE_PRECISION (TREE_TYPE (arg00))
6615 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6617 tree stype = signed_type_for (TREE_TYPE (arg00));
6618 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6619 result_type,
6620 fold_convert_loc (loc, stype, arg00),
6621 build_int_cst (stype, 0));
6625 return NULL_TREE;
6628 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6629 equality/inequality test, then return a simplified form of
6630 the test using shifts and logical operations. Otherwise return
6631 NULL. TYPE is the desired result type. */
6633 tree
6634 fold_single_bit_test (location_t loc, enum tree_code code,
6635 tree arg0, tree arg1, tree result_type)
6637 /* If this is testing a single bit, we can optimize the test. */
6638 if ((code == NE_EXPR || code == EQ_EXPR)
6639 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6640 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6642 tree inner = TREE_OPERAND (arg0, 0);
6643 tree type = TREE_TYPE (arg0);
6644 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6645 enum machine_mode operand_mode = TYPE_MODE (type);
6646 int ops_unsigned;
6647 tree signed_type, unsigned_type, intermediate_type;
6648 tree tem, one;
6650 /* First, see if we can fold the single bit test into a sign-bit
6651 test. */
6652 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6653 result_type);
6654 if (tem)
6655 return tem;
6657 /* Otherwise we have (A & C) != 0 where C is a single bit,
6658 convert that into ((A >> C2) & 1), where C2 = log2(C).
6659 Similarly for (A & C) == 0. */
6661 /* If INNER is a right shift of a constant and it plus BITNUM does
6662 not overflow, adjust BITNUM and INNER. */
6663 if (TREE_CODE (inner) == RSHIFT_EXPR
6664 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6665 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6666 && bitnum < TYPE_PRECISION (type)
6667 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6668 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6670 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6671 inner = TREE_OPERAND (inner, 0);
6674 /* If we are going to be able to omit the AND below, we must do our
6675 operations as unsigned. If we must use the AND, we have a choice.
6676 Normally unsigned is faster, but for some machines signed is. */
6677 #ifdef LOAD_EXTEND_OP
6678 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6679 && !flag_syntax_only) ? 0 : 1;
6680 #else
6681 ops_unsigned = 1;
6682 #endif
6684 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6685 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6686 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6687 inner = fold_convert_loc (loc, intermediate_type, inner);
6689 if (bitnum != 0)
6690 inner = build2 (RSHIFT_EXPR, intermediate_type,
6691 inner, size_int (bitnum));
6693 one = build_int_cst (intermediate_type, 1);
6695 if (code == EQ_EXPR)
6696 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6698 /* Put the AND last so it can combine with more things. */
6699 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6701 /* Make sure to return the proper type. */
6702 inner = fold_convert_loc (loc, result_type, inner);
6704 return inner;
6706 return NULL_TREE;
6709 /* Check whether we are allowed to reorder operands arg0 and arg1,
6710 such that the evaluation of arg1 occurs before arg0. */
6712 static bool
6713 reorder_operands_p (const_tree arg0, const_tree arg1)
6715 if (! flag_evaluation_order)
6716 return true;
6717 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6718 return true;
6719 return ! TREE_SIDE_EFFECTS (arg0)
6720 && ! TREE_SIDE_EFFECTS (arg1);
6723 /* Test whether it is preferable to swap two operands, ARG0 and
6724 ARG1, for example because ARG0 is an integer constant and ARG1
6725 isn't. If REORDER is true, only recommend swapping if we can
6726 evaluate the operands in reverse order. */
6728 bool
6729 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6731 STRIP_SIGN_NOPS (arg0);
6732 STRIP_SIGN_NOPS (arg1);
6734 if (TREE_CODE (arg1) == INTEGER_CST)
6735 return 0;
6736 if (TREE_CODE (arg0) == INTEGER_CST)
6737 return 1;
6739 if (TREE_CODE (arg1) == REAL_CST)
6740 return 0;
6741 if (TREE_CODE (arg0) == REAL_CST)
6742 return 1;
6744 if (TREE_CODE (arg1) == FIXED_CST)
6745 return 0;
6746 if (TREE_CODE (arg0) == FIXED_CST)
6747 return 1;
6749 if (TREE_CODE (arg1) == COMPLEX_CST)
6750 return 0;
6751 if (TREE_CODE (arg0) == COMPLEX_CST)
6752 return 1;
6754 if (TREE_CONSTANT (arg1))
6755 return 0;
6756 if (TREE_CONSTANT (arg0))
6757 return 1;
6759 if (optimize_function_for_size_p (cfun))
6760 return 0;
6762 if (reorder && flag_evaluation_order
6763 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6764 return 0;
6766 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6767 for commutative and comparison operators. Ensuring a canonical
6768 form allows the optimizers to find additional redundancies without
6769 having to explicitly check for both orderings. */
6770 if (TREE_CODE (arg0) == SSA_NAME
6771 && TREE_CODE (arg1) == SSA_NAME
6772 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6773 return 1;
6775 /* Put SSA_NAMEs last. */
6776 if (TREE_CODE (arg1) == SSA_NAME)
6777 return 0;
6778 if (TREE_CODE (arg0) == SSA_NAME)
6779 return 1;
6781 /* Put variables last. */
6782 if (DECL_P (arg1))
6783 return 0;
6784 if (DECL_P (arg0))
6785 return 1;
6787 return 0;
6790 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6791 ARG0 is extended to a wider type. */
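/* For instance, if C has type unsigned char, then (int) C == 100
   compares a widened value against a constant that fits in the
   narrower type, so it folds to C == 100 performed directly in
   unsigned char.  */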
6793 static tree
6794 fold_widened_comparison (location_t loc, enum tree_code code,
6795 tree type, tree arg0, tree arg1)
6797 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6798 tree arg1_unw;
6799 tree shorter_type, outer_type;
6800 tree min, max;
6801 bool above, below;
6803 if (arg0_unw == arg0)
6804 return NULL_TREE;
6805 shorter_type = TREE_TYPE (arg0_unw);
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (shorter_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6813 return NULL_TREE;
6814 #endif
6816 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6817 return NULL_TREE;
6819 arg1_unw = get_unwidened (arg1, NULL_TREE);
6821 /* If possible, express the comparison in the shorter mode. */
6822 if ((code == EQ_EXPR || code == NE_EXPR
6823 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6824 && (TREE_TYPE (arg1_unw) == shorter_type
6825 || ((TYPE_PRECISION (shorter_type)
6826 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6827 && (TYPE_UNSIGNED (shorter_type)
6828 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6829 || (TREE_CODE (arg1_unw) == INTEGER_CST
6830 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6831 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6832 && int_fits_type_p (arg1_unw, shorter_type))))
6833 return fold_build2_loc (loc, code, type, arg0_unw,
6834 fold_convert_loc (loc, shorter_type, arg1_unw));
6836 if (TREE_CODE (arg1_unw) != INTEGER_CST
6837 || TREE_CODE (shorter_type) != INTEGER_TYPE
6838 || !int_fits_type_p (arg1_unw, shorter_type))
6839 return NULL_TREE;
6841 /* If we are comparing with an integer that does not fit into the range
6842 of the shorter type, the result is known. */
6843 outer_type = TREE_TYPE (arg1_unw);
6844 min = lower_bound_in_type (outer_type, shorter_type);
6845 max = upper_bound_in_type (outer_type, shorter_type);
6847 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6848 max, arg1_unw));
6849 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6850 arg1_unw, min));
6852 switch (code)
6854 case EQ_EXPR:
6855 if (above || below)
6856 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6857 break;
6859 case NE_EXPR:
6860 if (above || below)
6861 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6862 break;
6864 case LT_EXPR:
6865 case LE_EXPR:
6866 if (above)
6867 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6868 else if (below)
6869 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6871 case GT_EXPR:
6872 case GE_EXPR:
6873 if (above)
6874 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6875 else if (below)
6876 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6878 default:
6879 break;
6882 return NULL_TREE;
6885 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6886 ARG0 just the signedness is changed. */
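/* For instance, if X has type int, the test (unsigned int) X == 5U
   changes only the signedness of X, so it folds to X == 5 carried
   out in the inner type.  */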
6888 static tree
6889 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6890 tree arg0, tree arg1)
6892 tree arg0_inner;
6893 tree inner_type, outer_type;
6895 if (!CONVERT_EXPR_P (arg0))
6896 return NULL_TREE;
6898 outer_type = TREE_TYPE (arg0);
6899 arg0_inner = TREE_OPERAND (arg0, 0);
6900 inner_type = TREE_TYPE (arg0_inner);
6902 #ifdef HAVE_canonicalize_funcptr_for_compare
6903 /* Disable this optimization if we're casting a function pointer
6904 type on targets that require function pointer canonicalization. */
6905 if (HAVE_canonicalize_funcptr_for_compare
6906 && TREE_CODE (inner_type) == POINTER_TYPE
6907 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6908 return NULL_TREE;
6909 #endif
6911 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6912 return NULL_TREE;
6914 if (TREE_CODE (arg1) != INTEGER_CST
6915 && !(CONVERT_EXPR_P (arg1)
6916 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6917 return NULL_TREE;
6919 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6920 && code != NE_EXPR
6921 && code != EQ_EXPR)
6922 return NULL_TREE;
6924 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6925 return NULL_TREE;
6927 if (TREE_CODE (arg1) == INTEGER_CST)
6928 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6929 0, TREE_OVERFLOW (arg1));
6930 else
6931 arg1 = fold_convert_loc (loc, inner_type, arg1);
6933 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6936 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6937 the step of the array. Reconstructs s and delta in the case of s *
6938 delta being an integer constant (and thus already folded). ADDR is
6939 the address. OP1 is the multiplicative expression. If the
6940 function succeeds, the new address expression is returned.
6941 Otherwise NULL_TREE is returned. LOC is the location of the
6942 resulting expression. */
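/* For instance, with int a[10][10] (assuming 4-byte int) and
   ADDR == &a[i][j], an addend OP1 == 4 * d matches the step of the
   innermost dimension, so the result is &a[i][j + d], provided the
   new index provably stays within that dimension.  */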
6944 static tree
6945 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6947 tree s, delta, step;
6948 tree ref = TREE_OPERAND (addr, 0), pref;
6949 tree ret, pos;
6950 tree itype;
6951 bool mdim = false;
6953 /* Strip the nops that might be added when converting op1 to sizetype. */
6954 STRIP_NOPS (op1);
6956 /* Canonicalize op1 into a possibly non-constant delta
6957 and an INTEGER_CST s. */
6958 if (TREE_CODE (op1) == MULT_EXPR)
6960 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6962 STRIP_NOPS (arg0);
6963 STRIP_NOPS (arg1);
6965 if (TREE_CODE (arg0) == INTEGER_CST)
6967 s = arg0;
6968 delta = arg1;
6970 else if (TREE_CODE (arg1) == INTEGER_CST)
6972 s = arg1;
6973 delta = arg0;
6975 else
6976 return NULL_TREE;
6978 else if (TREE_CODE (op1) == INTEGER_CST)
6980 delta = op1;
6981 s = NULL_TREE;
6983 else
6985 /* Treat this as delta * 1. */
6986 delta = op1;
6987 s = integer_one_node;
6990 /* Handle &x.array the same as we would handle &x.array[0]. */
6991 if (TREE_CODE (ref) == COMPONENT_REF
6992 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6994 tree domain;
6996 /* Remember if this was a multi-dimensional array. */
6997 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6998 mdim = true;
7000 domain = TYPE_DOMAIN (TREE_TYPE (ref));
7001 if (! domain)
7002 goto cont;
7003 itype = TREE_TYPE (domain);
7005 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7006 if (TREE_CODE (step) != INTEGER_CST)
7007 goto cont;
7009 if (s)
7011 if (! tree_int_cst_equal (step, s))
7012 goto cont;
7014 else
7016 /* See whether delta is a multiple of step. */
7017 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7018 if (! tmp)
7019 goto cont;
7020 delta = tmp;
7023 /* Only fold here if we can verify we do not overflow one
7024 dimension of a multi-dimensional array. */
7025 if (mdim)
7027 tree tmp;
7029 if (!TYPE_MIN_VALUE (domain)
7030 || !TYPE_MAX_VALUE (domain)
7031 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7032 goto cont;
7034 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7035 fold_convert_loc (loc, itype,
7036 TYPE_MIN_VALUE (domain)),
7037 fold_convert_loc (loc, itype, delta));
7038 if (TREE_CODE (tmp) != INTEGER_CST
7039 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7040 goto cont;
7043 /* We found a suitable component reference. */
7045 pref = TREE_OPERAND (addr, 0);
7046 ret = copy_node (pref);
7047 SET_EXPR_LOCATION (ret, loc);
7049 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7050 fold_build2_loc
7051 (loc, PLUS_EXPR, itype,
7052 fold_convert_loc (loc, itype,
7053 TYPE_MIN_VALUE
7054 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7055 fold_convert_loc (loc, itype, delta)),
7056 NULL_TREE, NULL_TREE);
7057 return build_fold_addr_expr_loc (loc, ret);
7060 cont:
7062 for (;; ref = TREE_OPERAND (ref, 0))
7064 if (TREE_CODE (ref) == ARRAY_REF)
7066 tree domain;
7068 /* Remember if this was a multi-dimensional array. */
7069 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7070 mdim = true;
7072 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7073 if (! domain)
7074 continue;
7075 itype = TREE_TYPE (domain);
7077 step = array_ref_element_size (ref);
7078 if (TREE_CODE (step) != INTEGER_CST)
7079 continue;
7081 if (s)
7083 if (! tree_int_cst_equal (step, s))
7084 continue;
7086 else
7088 /* See whether delta is a multiple of step. */
7089 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7090 if (! tmp)
7091 continue;
7092 delta = tmp;
7095 /* Only fold here if we can verify we do not overflow one
7096 dimension of a multi-dimensional array. */
7097 if (mdim)
7099 tree tmp;
7101 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7102 || !TYPE_MAX_VALUE (domain)
7103 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7104 continue;
7106 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7107 fold_convert_loc (loc, itype,
7108 TREE_OPERAND (ref, 1)),
7109 fold_convert_loc (loc, itype, delta));
7110 if (!tmp
7111 || TREE_CODE (tmp) != INTEGER_CST
7112 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7113 continue;
7116 break;
7118 else
7119 mdim = false;
7121 if (!handled_component_p (ref))
7122 return NULL_TREE;
7125 /* We found a suitable array reference. So copy everything up to it,
7126 and replace the index. */
7128 pref = TREE_OPERAND (addr, 0);
7129 ret = copy_node (pref);
7130 SET_EXPR_LOCATION (ret, loc);
7131 pos = ret;
7133 while (pref != ref)
7135 pref = TREE_OPERAND (pref, 0);
7136 TREE_OPERAND (pos, 0) = copy_node (pref);
7137 pos = TREE_OPERAND (pos, 0);
7140 TREE_OPERAND (pos, 1)
7141 = fold_build2_loc (loc, PLUS_EXPR, itype,
7142 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7143 fold_convert_loc (loc, itype, delta));
7144 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7148 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7149 means A >= Y && A != MAX, but in this case we know that
7150 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7152 static tree
7153 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7155 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7157 if (TREE_CODE (bound) == LT_EXPR)
7158 a = TREE_OPERAND (bound, 0);
7159 else if (TREE_CODE (bound) == GT_EXPR)
7160 a = TREE_OPERAND (bound, 1);
7161 else
7162 return NULL_TREE;
7164 typea = TREE_TYPE (a);
7165 if (!INTEGRAL_TYPE_P (typea)
7166 && !POINTER_TYPE_P (typea))
7167 return NULL_TREE;
7169 if (TREE_CODE (ineq) == LT_EXPR)
7171 a1 = TREE_OPERAND (ineq, 1);
7172 y = TREE_OPERAND (ineq, 0);
7174 else if (TREE_CODE (ineq) == GT_EXPR)
7176 a1 = TREE_OPERAND (ineq, 0);
7177 y = TREE_OPERAND (ineq, 1);
7179 else
7180 return NULL_TREE;
7182 if (TREE_TYPE (a1) != typea)
7183 return NULL_TREE;
7185 if (POINTER_TYPE_P (typea))
7187 /* Convert the pointer types into integers before taking the difference. */
7188 tree ta = fold_convert_loc (loc, ssizetype, a);
7189 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7190 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7192 else
7193 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7195 if (!diff || !integer_onep (diff))
7196 return NULL_TREE;
7198 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7201 /* Fold a sum or difference of at least one multiplication.
7202 Returns the folded tree or NULL if no simplification could be made. */
7204 static tree
7205 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7206 tree arg0, tree arg1)
7208 tree arg00, arg01, arg10, arg11;
7209 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7211 /* (A * C) +- (B * C) -> (A+-B) * C.
7212 (A * C) +- A -> A * (C+-1).
7213 We are most concerned about the case where C is a constant,
7214 but other combinations show up during loop reduction. Since
7215 it is not difficult, try all four possibilities. */
7217 if (TREE_CODE (arg0) == MULT_EXPR)
7219 arg00 = TREE_OPERAND (arg0, 0);
7220 arg01 = TREE_OPERAND (arg0, 1);
7222 else if (TREE_CODE (arg0) == INTEGER_CST)
7224 arg00 = build_one_cst (type);
7225 arg01 = arg0;
7227 else
7229 /* We cannot generate constant 1 for fract. */
7230 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7231 return NULL_TREE;
7232 arg00 = arg0;
7233 arg01 = build_one_cst (type);
7235 if (TREE_CODE (arg1) == MULT_EXPR)
7237 arg10 = TREE_OPERAND (arg1, 0);
7238 arg11 = TREE_OPERAND (arg1, 1);
7240 else if (TREE_CODE (arg1) == INTEGER_CST)
7242 arg10 = build_one_cst (type);
7243 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7244 the purpose of this canonicalization. */
7245 if (TREE_INT_CST_HIGH (arg1) == -1
7246 && negate_expr_p (arg1)
7247 && code == PLUS_EXPR)
7249 arg11 = negate_expr (arg1);
7250 code = MINUS_EXPR;
7252 else
7253 arg11 = arg1;
7255 else
7257 /* We cannot generate constant 1 for fract. */
7258 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7259 return NULL_TREE;
7260 arg10 = arg1;
7261 arg11 = build_one_cst (type);
7263 same = NULL_TREE;
7265 if (operand_equal_p (arg01, arg11, 0))
7266 same = arg01, alt0 = arg00, alt1 = arg10;
7267 else if (operand_equal_p (arg00, arg10, 0))
7268 same = arg00, alt0 = arg01, alt1 = arg11;
7269 else if (operand_equal_p (arg00, arg11, 0))
7270 same = arg00, alt0 = arg01, alt1 = arg10;
7271 else if (operand_equal_p (arg01, arg10, 0))
7272 same = arg01, alt0 = arg00, alt1 = arg11;
7274 /* No identical multiplicands; see if we can find a common
7275 power-of-two factor in non-power-of-two multiplies. This
7276 can help in multi-dimensional array access. */
7277 else if (tree_fits_shwi_p (arg01)
7278 && tree_fits_shwi_p (arg11))
7280 HOST_WIDE_INT int01, int11, tmp;
7281 bool swap = false;
7282 tree maybe_same;
7283 int01 = tree_to_shwi (arg01);
7284 int11 = tree_to_shwi (arg11);
7286 /* Move min of absolute values to int11. */
7287 if (absu_hwi (int01) < absu_hwi (int11))
7289 tmp = int01, int01 = int11, int11 = tmp;
7290 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7291 maybe_same = arg01;
7292 swap = true;
7294 else
7295 maybe_same = arg11;
7297 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7298 /* The remainder should not be a constant, otherwise we
7299 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7300 increase the number of multiplications needed. */
7301 && TREE_CODE (arg10) != INTEGER_CST)
7303 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7304 build_int_cst (TREE_TYPE (arg00),
7305 int01 / int11));
7306 alt1 = arg10;
7307 same = maybe_same;
7308 if (swap)
7309 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7313 if (same)
7314 return fold_build2_loc (loc, MULT_EXPR, type,
7315 fold_build2_loc (loc, code, type,
7316 fold_convert_loc (loc, type, alt0),
7317 fold_convert_loc (loc, type, alt1)),
7318 fold_convert_loc (loc, type, same));
7320 return NULL_TREE;
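/* Sanity check, illustrative only and not part of GCC (guarded out),
   of the two factorings performed above: pulling out an identical
   multiplicand and pulling out a common power-of-two factor.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -4; a <= 4; a++)
    for (int b = -4; b <= 4; b++)
      for (int c = -4; c <= 4; c++)
        {
          /* (A * C) + (B * C) -> (A + B) * C.  */
          assert (a * c + b * c == (a + b) * c);
          /* Common power-of-two factor: A * 4 + B * 2 -> (A * 2 + B) * 2.  */
          assert (a * 4 + b * 2 == (a * 2 + b) * 2);
        }
  return 0;
}
#endif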
7323 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7324 specified by EXPR into the buffer PTR of length LEN bytes.
7325 Return the number of bytes placed in the buffer, or zero
7326 upon failure. */
7328 static int
7329 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7331 tree type = TREE_TYPE (expr);
7332 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7333 int byte, offset, word, words;
7334 unsigned char value;
7336 if (total_bytes > len)
7337 return 0;
7338 words = total_bytes / UNITS_PER_WORD;
7340 for (byte = 0; byte < total_bytes; byte++)
7342 int bitpos = byte * BITS_PER_UNIT;
7343 if (bitpos < HOST_BITS_PER_WIDE_INT)
7344 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7345 else
7346 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7347 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7349 if (total_bytes > UNITS_PER_WORD)
7351 word = byte / UNITS_PER_WORD;
7352 if (WORDS_BIG_ENDIAN)
7353 word = (words - 1) - word;
7354 offset = word * UNITS_PER_WORD;
7355 if (BYTES_BIG_ENDIAN)
7356 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7357 else
7358 offset += byte % UNITS_PER_WORD;
7360 else
7361 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7362 ptr[offset] = value;
7364 return total_bytes;
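/* Minimal sketch, not part of GCC (guarded out), of the byte-order
   logic above for the single-word case; encode_u64 is a hypothetical
   helper, and the multi-word shuffle driven by UNITS_PER_WORD and
   WORDS_BIG_ENDIAN is omitted.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
encode_u64 (uint64_t val, unsigned char *ptr, int big_endian)
{
  for (int byte = 0; byte < 8; byte++)
    {
      /* Mirrors "value = ... >> bitpos" and "ptr[offset] = value".  */
      unsigned char value = (unsigned char) (val >> (byte * 8));
      int offset = big_endian ? 7 - byte : byte;
      ptr[offset] = value;
    }
}

int
main (void)
{
  unsigned char buf[8];
  encode_u64 (0x0102030405060708ULL, buf, 1);
  assert (buf[0] == 0x01 && buf[7] == 0x08);
  encode_u64 (0x0102030405060708ULL, buf, 0);
  assert (buf[0] == 0x08 && buf[7] == 0x01);
  return 0;
}
#endif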
7368 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7369 specified by EXPR into the buffer PTR of length LEN bytes.
7370 Return the number of bytes placed in the buffer, or zero
7371 upon failure. */
7373 static int
7374 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7376 tree type = TREE_TYPE (expr);
7377 enum machine_mode mode = TYPE_MODE (type);
7378 int total_bytes = GET_MODE_SIZE (mode);
7379 FIXED_VALUE_TYPE value;
7380 tree i_value, i_type;
7382 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7383 return 0;
7385 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7387 if (NULL_TREE == i_type
7388 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7389 return 0;
7391 value = TREE_FIXED_CST (expr);
7392 i_value = double_int_to_tree (i_type, value.data);
7394 return native_encode_int (i_value, ptr, len);
7398 /* Subroutine of native_encode_expr. Encode the REAL_CST
7399 specified by EXPR into the buffer PTR of length LEN bytes.
7400 Return the number of bytes placed in the buffer, or zero
7401 upon failure. */
7403 static int
7404 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7406 tree type = TREE_TYPE (expr);
7407 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7408 int byte, offset, word, words, bitpos;
7409 unsigned char value;
7411 /* There are always 32 bits in each long, no matter the size of
7412 the host's long. We handle floating point representations with
7413 up to 192 bits. */
7414 long tmp[6];
7416 if (total_bytes > len)
7417 return 0;
7418 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7420 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7422 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7423 bitpos += BITS_PER_UNIT)
7425 byte = (bitpos / BITS_PER_UNIT) & 3;
7426 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7428 if (UNITS_PER_WORD < 4)
7430 word = byte / UNITS_PER_WORD;
7431 if (WORDS_BIG_ENDIAN)
7432 word = (words - 1) - word;
7433 offset = word * UNITS_PER_WORD;
7434 if (BYTES_BIG_ENDIAN)
7435 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7436 else
7437 offset += byte % UNITS_PER_WORD;
7439 else
7440 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7441 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7443 return total_bytes;
7446 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7447 specified by EXPR into the buffer PTR of length LEN bytes.
7448 Return the number of bytes placed in the buffer, or zero
7449 upon failure. */
7451 static int
7452 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7454 int rsize, isize;
7455 tree part;
7457 part = TREE_REALPART (expr);
7458 rsize = native_encode_expr (part, ptr, len);
7459 if (rsize == 0)
7460 return 0;
7461 part = TREE_IMAGPART (expr);
7462 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7463 if (isize != rsize)
7464 return 0;
7465 return rsize + isize;
7469 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7470 specified by EXPR into the buffer PTR of length LEN bytes.
7471 Return the number of bytes placed in the buffer, or zero
7472 upon failure. */
7474 static int
7475 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7477 unsigned i, count;
7478 int size, offset;
7479 tree itype, elem;
7481 offset = 0;
7482 count = VECTOR_CST_NELTS (expr);
7483 itype = TREE_TYPE (TREE_TYPE (expr));
7484 size = GET_MODE_SIZE (TYPE_MODE (itype));
7485 for (i = 0; i < count; i++)
7487 elem = VECTOR_CST_ELT (expr, i);
7488 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7489 return 0;
7490 offset += size;
7492 return offset;
7496 /* Subroutine of native_encode_expr. Encode the STRING_CST
7497 specified by EXPR into the buffer PTR of length LEN bytes.
7498 Return the number of bytes placed in the buffer, or zero
7499 upon failure. */
7501 static int
7502 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7504 tree type = TREE_TYPE (expr);
7505 HOST_WIDE_INT total_bytes;
7507 if (TREE_CODE (type) != ARRAY_TYPE
7508 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7509 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7510 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7511 return 0;
7512 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7513 if (total_bytes > len)
7514 return 0;
7515 if (TREE_STRING_LENGTH (expr) < total_bytes)
7517 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7518 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7519 total_bytes - TREE_STRING_LENGTH (expr));
7521 else
7522 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7523 return total_bytes;
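/* Stand-alone sketch, not part of GCC (guarded out), of the padding
   logic above; encode_string is a hypothetical helper taking the array
   size and literal length as plain ints.  */
#if 0
#include <assert.h>
#include <string.h>

static int
encode_string (const char *str, int str_len, unsigned char *ptr,
               int total_bytes, int len)
{
  if (total_bytes > len)
    return 0;
  if (str_len < total_bytes)
    {
      /* Copy the literal, then zero-fill the tail of the array.  */
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, total_bytes - str_len);
    }
  else
    memcpy (ptr, str, total_bytes);
  return total_bytes;
}

int
main (void)
{
  unsigned char buf[8];
  assert (encode_string ("hi", 3, buf, 8, sizeof buf) == 8);
  assert (buf[0] == 'h' && buf[1] == 'i' && buf[7] == 0);
  return 0;
}
#endif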
7527 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7528 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7529 buffer PTR of length LEN bytes. Return the number of bytes
7530 placed in the buffer, or zero upon failure. */
7532 int
7533 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7535 switch (TREE_CODE (expr))
7537 case INTEGER_CST:
7538 return native_encode_int (expr, ptr, len);
7540 case REAL_CST:
7541 return native_encode_real (expr, ptr, len);
7543 case FIXED_CST:
7544 return native_encode_fixed (expr, ptr, len);
7546 case COMPLEX_CST:
7547 return native_encode_complex (expr, ptr, len);
7549 case VECTOR_CST:
7550 return native_encode_vector (expr, ptr, len);
7552 case STRING_CST:
7553 return native_encode_string (expr, ptr, len);
7555 default:
7556 return 0;
7561 /* Subroutine of native_interpret_expr. Interpret the contents of
7562 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7563 If the buffer cannot be interpreted, return NULL_TREE. */
7565 static tree
7566 native_interpret_int (tree type, const unsigned char *ptr, int len)
7568 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7569 double_int result;
7571 if (total_bytes > len
7572 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7573 return NULL_TREE;
7575 result = double_int::from_buffer (ptr, total_bytes);
7577 return double_int_to_tree (type, result);
7581 /* Subroutine of native_interpret_expr. Interpret the contents of
7582 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7583 If the buffer cannot be interpreted, return NULL_TREE. */
7585 static tree
7586 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7588 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7589 double_int result;
7590 FIXED_VALUE_TYPE fixed_value;
7592 if (total_bytes > len
7593 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7594 return NULL_TREE;
7596 result = double_int::from_buffer (ptr, total_bytes);
7597 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7599 return build_fixed (type, fixed_value);
7603 /* Subroutine of native_interpret_expr. Interpret the contents of
7604 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7605 If the buffer cannot be interpreted, return NULL_TREE. */
7607 static tree
7608 native_interpret_real (tree type, const unsigned char *ptr, int len)
7610 enum machine_mode mode = TYPE_MODE (type);
7611 int total_bytes = GET_MODE_SIZE (mode);
7612 int byte, offset, word, words, bitpos;
7613 unsigned char value;
7614 /* There are always 32 bits in each long, no matter the size of
7615 the host's long. We handle floating point representations with
7616 up to 192 bits. */
7617 REAL_VALUE_TYPE r;
7618 long tmp[6];
7620 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7621 if (total_bytes > len || total_bytes > 24)
7622 return NULL_TREE;
7623 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7625 memset (tmp, 0, sizeof (tmp));
7626 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7627 bitpos += BITS_PER_UNIT)
7629 byte = (bitpos / BITS_PER_UNIT) & 3;
7630 if (UNITS_PER_WORD < 4)
7632 word = byte / UNITS_PER_WORD;
7633 if (WORDS_BIG_ENDIAN)
7634 word = (words - 1) - word;
7635 offset = word * UNITS_PER_WORD;
7636 if (BYTES_BIG_ENDIAN)
7637 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7638 else
7639 offset += byte % UNITS_PER_WORD;
7641 else
7642 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7643 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7645 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7648 real_from_target (&r, tmp, mode);
7649 return build_real (type, r);
7653 /* Subroutine of native_interpret_expr. Interpret the contents of
7654 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7655 If the buffer cannot be interpreted, return NULL_TREE. */
7657 static tree
7658 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7660 tree etype, rpart, ipart;
7661 int size;
7663 etype = TREE_TYPE (type);
7664 size = GET_MODE_SIZE (TYPE_MODE (etype));
7665 if (size * 2 > len)
7666 return NULL_TREE;
7667 rpart = native_interpret_expr (etype, ptr, size);
7668 if (!rpart)
7669 return NULL_TREE;
7670 ipart = native_interpret_expr (etype, ptr+size, size);
7671 if (!ipart)
7672 return NULL_TREE;
7673 return build_complex (type, rpart, ipart);
7677 /* Subroutine of native_interpret_expr. Interpret the contents of
7678 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7679 If the buffer cannot be interpreted, return NULL_TREE. */
7681 static tree
7682 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7684 tree etype, elem;
7685 int i, size, count;
7686 tree *elements;
7688 etype = TREE_TYPE (type);
7689 size = GET_MODE_SIZE (TYPE_MODE (etype));
7690 count = TYPE_VECTOR_SUBPARTS (type);
7691 if (size * count > len)
7692 return NULL_TREE;
7694 elements = XALLOCAVEC (tree, count);
7695 for (i = count - 1; i >= 0; i--)
7697 elem = native_interpret_expr (etype, ptr+(i*size), size);
7698 if (!elem)
7699 return NULL_TREE;
7700 elements[i] = elem;
7702 return build_vector (type, elements);
7706 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7707 the buffer PTR of length LEN as a constant of type TYPE. For
7708 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7709 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7710 return NULL_TREE. */
7712 tree
7713 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7715 switch (TREE_CODE (type))
7717 case INTEGER_TYPE:
7718 case ENUMERAL_TYPE:
7719 case BOOLEAN_TYPE:
7720 case POINTER_TYPE:
7721 case REFERENCE_TYPE:
7722 return native_interpret_int (type, ptr, len);
7724 case REAL_TYPE:
7725 return native_interpret_real (type, ptr, len);
7727 case FIXED_POINT_TYPE:
7728 return native_interpret_fixed (type, ptr, len);
7730 case COMPLEX_TYPE:
7731 return native_interpret_complex (type, ptr, len);
7733 case VECTOR_TYPE:
7734 return native_interpret_vector (type, ptr, len);
7736 default:
7737 return NULL_TREE;
7741 /* Returns true if we can interpret the contents of a native encoding
7742 as TYPE. */
7744 static bool
7745 can_native_interpret_type_p (tree type)
7747 switch (TREE_CODE (type))
7749 case INTEGER_TYPE:
7750 case ENUMERAL_TYPE:
7751 case BOOLEAN_TYPE:
7752 case POINTER_TYPE:
7753 case REFERENCE_TYPE:
7754 case FIXED_POINT_TYPE:
7755 case REAL_TYPE:
7756 case COMPLEX_TYPE:
7757 case VECTOR_TYPE:
7758 return true;
7759 default:
7760 return false;
7764 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7765 TYPE at compile-time. If we're unable to perform the conversion
7766 return NULL_TREE. */
7768 static tree
7769 fold_view_convert_expr (tree type, tree expr)
7771 /* We support up to 512-bit values (for V8DFmode). */
7772 unsigned char buffer[64];
7773 int len;
7775 /* Check that the host and target are sane. */
7776 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7777 return NULL_TREE;
7779 len = native_encode_expr (expr, buffer, sizeof (buffer));
7780 if (len == 0)
7781 return NULL_TREE;
7783 return native_interpret_expr (type, buffer, len);
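/* What the encode/interpret round trip above amounts to at run time,
   illustrative only and not part of GCC (guarded out); assumes the
   host uses a 4-byte IEEE-754 float.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t u;
  unsigned char buffer[64];	/* mirrors the 512-bit buffer above */

  memcpy (buffer, &f, sizeof f);	/* the native_encode_expr step */
  memcpy (&u, buffer, sizeof u);	/* the native_interpret_expr step */
  assert (u == 0x3f800000u);		/* IEEE-754 bits of 1.0f */
  return 0;
}
#endif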
7786 /* Build an expression for the address of T. Folds away INDIRECT_REF
7787 to avoid confusing the gimplify process. */
7789 tree
7790 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7792 /* The size of the object is not relevant when talking about its address. */
7793 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7794 t = TREE_OPERAND (t, 0);
7796 if (TREE_CODE (t) == INDIRECT_REF)
7798 t = TREE_OPERAND (t, 0);
7800 if (TREE_TYPE (t) != ptrtype)
7801 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7803 else if (TREE_CODE (t) == MEM_REF
7804 && integer_zerop (TREE_OPERAND (t, 1)))
7805 return TREE_OPERAND (t, 0);
7806 else if (TREE_CODE (t) == MEM_REF
7807 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7808 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7809 TREE_OPERAND (t, 0),
7810 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7811 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7813 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7815 if (TREE_TYPE (t) != ptrtype)
7816 t = fold_convert_loc (loc, ptrtype, t);
7818 else
7819 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7821 return t;
7824 /* Build an expression for the address of T. */
7826 tree
7827 build_fold_addr_expr_loc (location_t loc, tree t)
7829 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7831 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7834 static bool vec_cst_ctor_to_array (tree, tree *);
7836 /* Fold a unary expression of code CODE and type TYPE with operand
7837 OP0. Return the folded expression if folding is successful.
7838 Otherwise, return NULL_TREE. */
7840 tree
7841 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7843 tree tem;
7844 tree arg0;
7845 enum tree_code_class kind = TREE_CODE_CLASS (code);
7847 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7848 && TREE_CODE_LENGTH (code) == 1);
7850 arg0 = op0;
7851 if (arg0)
7853 if (CONVERT_EXPR_CODE_P (code)
7854 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7856 /* Don't use STRIP_NOPS, because signedness of argument type
7857 matters. */
7858 STRIP_SIGN_NOPS (arg0);
7860 else
7862 /* Strip any conversions that don't change the mode. This
7863 is safe for every expression, except for a comparison
7864 expression because its signedness is derived from its
7865 operands.
7867 Note that this is done as an internal manipulation within
7868 the constant folder, in order to find the simplest
7869 representation of the arguments so that their form can be
7870 studied. In any case, the appropriate type conversions
7871 should be put back in the tree that will get out of the
7872 constant folder. */
7873 STRIP_NOPS (arg0);
7877 if (TREE_CODE_CLASS (code) == tcc_unary)
7879 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7880 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7881 fold_build1_loc (loc, code, type,
7882 fold_convert_loc (loc, TREE_TYPE (op0),
7883 TREE_OPERAND (arg0, 1))));
7884 else if (TREE_CODE (arg0) == COND_EXPR)
7886 tree arg01 = TREE_OPERAND (arg0, 1);
7887 tree arg02 = TREE_OPERAND (arg0, 2);
7888 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7889 arg01 = fold_build1_loc (loc, code, type,
7890 fold_convert_loc (loc,
7891 TREE_TYPE (op0), arg01));
7892 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7893 arg02 = fold_build1_loc (loc, code, type,
7894 fold_convert_loc (loc,
7895 TREE_TYPE (op0), arg02));
7896 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7897 arg01, arg02);
7899 /* If this was a conversion, and all we did was to move it
7900 inside the COND_EXPR, bring it back out. But leave it if
7901 it is a conversion from integer to integer and the
7902 result precision is no wider than a word since such a
7903 conversion is cheap and may be optimized away by combine,
7904 while it couldn't if it were outside the COND_EXPR. Then return
7905 so we don't get into an infinite recursion loop taking the
7906 conversion out and then back in. */
7908 if ((CONVERT_EXPR_CODE_P (code)
7909 || code == NON_LVALUE_EXPR)
7910 && TREE_CODE (tem) == COND_EXPR
7911 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7912 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7913 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7914 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7915 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7916 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7917 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7918 && (INTEGRAL_TYPE_P
7919 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7920 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7921 || flag_syntax_only))
7922 tem = build1_loc (loc, code, type,
7923 build3 (COND_EXPR,
7924 TREE_TYPE (TREE_OPERAND
7925 (TREE_OPERAND (tem, 1), 0)),
7926 TREE_OPERAND (tem, 0),
7927 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7928 TREE_OPERAND (TREE_OPERAND (tem, 2),
7929 0)));
7930 return tem;
7934 switch (code)
7936 case PAREN_EXPR:
7937 /* Re-association barriers around constants and other re-association
7938 barriers can be removed. */
7939 if (CONSTANT_CLASS_P (op0)
7940 || TREE_CODE (op0) == PAREN_EXPR)
7941 return fold_convert_loc (loc, type, op0);
7942 return NULL_TREE;
7944 CASE_CONVERT:
7945 case FLOAT_EXPR:
7946 case FIX_TRUNC_EXPR:
7947 if (TREE_TYPE (op0) == type)
7948 return op0;
7950 if (COMPARISON_CLASS_P (op0))
7952 /* If we have (type) (a CMP b) and type is an integral type, return
7953 new expression involving the new type. Canonicalize
7954 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7955 non-integral type.
7956 Do not fold the result as that would not simplify further, also
7957 folding again results in recursions. */
7958 if (TREE_CODE (type) == BOOLEAN_TYPE)
7959 return build2_loc (loc, TREE_CODE (op0), type,
7960 TREE_OPERAND (op0, 0),
7961 TREE_OPERAND (op0, 1));
7962 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7963 && TREE_CODE (type) != VECTOR_TYPE)
7964 return build3_loc (loc, COND_EXPR, type, op0,
7965 constant_boolean_node (true, type),
7966 constant_boolean_node (false, type));
7969 /* Handle cases of two conversions in a row. */
7970 if (CONVERT_EXPR_P (op0))
7972 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7973 tree inter_type = TREE_TYPE (op0);
7974 int inside_int = INTEGRAL_TYPE_P (inside_type);
7975 int inside_ptr = POINTER_TYPE_P (inside_type);
7976 int inside_float = FLOAT_TYPE_P (inside_type);
7977 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7978 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7979 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7980 int inter_int = INTEGRAL_TYPE_P (inter_type);
7981 int inter_ptr = POINTER_TYPE_P (inter_type);
7982 int inter_float = FLOAT_TYPE_P (inter_type);
7983 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7984 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7985 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7986 int final_int = INTEGRAL_TYPE_P (type);
7987 int final_ptr = POINTER_TYPE_P (type);
7988 int final_float = FLOAT_TYPE_P (type);
7989 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7990 unsigned int final_prec = TYPE_PRECISION (type);
7991 int final_unsignedp = TYPE_UNSIGNED (type);
7993 /* In addition to the cases of two conversions in a row
7994 handled below, if we are converting something to its own
7995 type via an object of identical or wider precision, neither
7996 conversion is needed. */
7997 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7998 && (((inter_int || inter_ptr) && final_int)
7999 || (inter_float && final_float))
8000 && inter_prec >= final_prec)
8001 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8003 /* Likewise, if the intermediate and initial types are either both
8004 float or both integer, we don't need the middle conversion if the
8005 former is wider than the latter and doesn't change the signedness
8006 (for integers). Avoid this if the final type is a pointer since
8007 then we sometimes need the middle conversion. Likewise if the
8008 final type has a precision not equal to the size of its mode. */
8009 if (((inter_int && inside_int)
8010 || (inter_float && inside_float)
8011 || (inter_vec && inside_vec))
8012 && inter_prec >= inside_prec
8013 && (inter_float || inter_vec
8014 || inter_unsignedp == inside_unsignedp)
8015 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8016 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8017 && ! final_ptr
8018 && (! final_vec || inter_prec == inside_prec))
8019 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8021 /* If we have a sign-extension of a zero-extended value, we can
8022 replace that by a single zero-extension. Likewise if the
8023 final conversion does not change precision we can drop the
8024 intermediate conversion. */
8025 if (inside_int && inter_int && final_int
8026 && ((inside_prec < inter_prec && inter_prec < final_prec
8027 && inside_unsignedp && !inter_unsignedp)
8028 || final_prec == inter_prec))
8029 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8031 /* Two conversions in a row are not needed unless:
8032 - some conversion is floating-point (overstrict for now), or
8033 - some conversion is a vector (overstrict for now), or
8034 - the intermediate type is narrower than both initial and
8035 final, or
8036 - the intermediate type and innermost type differ in signedness,
8037 and the outermost type is wider than the intermediate, or
8038 - the initial type is a pointer type and the precisions of the
8039 intermediate and final types differ, or
8040 - the final type is a pointer type and the precisions of the
8041 initial and intermediate types differ. */
8042 if (! inside_float && ! inter_float && ! final_float
8043 && ! inside_vec && ! inter_vec && ! final_vec
8044 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8045 && ! (inside_int && inter_int
8046 && inter_unsignedp != inside_unsignedp
8047 && inter_prec < final_prec)
8048 && ((inter_unsignedp && inter_prec > inside_prec)
8049 == (final_unsignedp && final_prec > inter_prec))
8050 && ! (inside_ptr && inter_prec != final_prec)
8051 && ! (final_ptr && inside_prec != inter_prec)
8052 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8053 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8054 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8057 /* Handle (T *)&A.B.C for A being of type T and B and C
8058 living at offset zero. This occurs frequently in
8059 C++ upcasting and then accessing the base. */
8060 if (TREE_CODE (op0) == ADDR_EXPR
8061 && POINTER_TYPE_P (type)
8062 && handled_component_p (TREE_OPERAND (op0, 0)))
8064 HOST_WIDE_INT bitsize, bitpos;
8065 tree offset;
8066 enum machine_mode mode;
8067 int unsignedp, volatilep;
8068 tree base = TREE_OPERAND (op0, 0);
8069 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8070 &mode, &unsignedp, &volatilep, false);
8071 /* If the reference was to a (constant) zero offset, we can use
8072 the address of the base if it has the same base type
8073 as the result type and the pointer type is unqualified. */
8074 if (! offset && bitpos == 0
8075 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8076 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8077 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8078 return fold_convert_loc (loc, type,
8079 build_fold_addr_expr_loc (loc, base));
8082 if (TREE_CODE (op0) == MODIFY_EXPR
8083 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8084 /* Detect assigning a bitfield. */
8085 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8086 && DECL_BIT_FIELD
8087 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8089 /* Don't leave an assignment inside a conversion
8090 unless assigning a bitfield. */
8091 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8092 /* First do the assignment, then return converted constant. */
8093 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8094 TREE_NO_WARNING (tem) = 1;
8095 TREE_USED (tem) = 1;
8096 return tem;
8099 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8100 constants (if x has signed type, the sign bit cannot be set
8101 in c). This folds extension into the BIT_AND_EXPR.
8102 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8103 very likely don't have maximal range for their precision and this
8104 transformation effectively doesn't preserve non-maximal ranges. */
8105 if (TREE_CODE (type) == INTEGER_TYPE
8106 && TREE_CODE (op0) == BIT_AND_EXPR
8107 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8109 tree and_expr = op0;
8110 tree and0 = TREE_OPERAND (and_expr, 0);
8111 tree and1 = TREE_OPERAND (and_expr, 1);
8112 int change = 0;
8114 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8115 || (TYPE_PRECISION (type)
8116 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8117 change = 1;
8118 else if (TYPE_PRECISION (TREE_TYPE (and1))
8119 <= HOST_BITS_PER_WIDE_INT
8120 && tree_fits_uhwi_p (and1))
8122 unsigned HOST_WIDE_INT cst;
8124 cst = tree_to_uhwi (and1);
8125 cst &= HOST_WIDE_INT_M1U
8126 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8127 change = (cst == 0);
8128 #ifdef LOAD_EXTEND_OP
8129 if (change
8130 && !flag_syntax_only
8131 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8132 == ZERO_EXTEND))
8134 tree uns = unsigned_type_for (TREE_TYPE (and0));
8135 and0 = fold_convert_loc (loc, uns, and0);
8136 and1 = fold_convert_loc (loc, uns, and1);
8138 #endif
8140 if (change)
8142 tem = force_fit_type_double (type, tree_to_double_int (and1),
8143 0, TREE_OVERFLOW (and1));
8144 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8145 fold_convert_loc (loc, type, and0), tem);
8149 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8150 when one of the new casts will fold away. Conservatively we assume
8151 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8152 if (POINTER_TYPE_P (type)
8153 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8154 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8155 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8156 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8157 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8159 tree arg00 = TREE_OPERAND (arg0, 0);
8160 tree arg01 = TREE_OPERAND (arg0, 1);
8162 return fold_build_pointer_plus_loc
8163 (loc, fold_convert_loc (loc, type, arg00), arg01);
8166 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8167 of the same precision, and X is an integer type not narrower than
8168 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8169 if (INTEGRAL_TYPE_P (type)
8170 && TREE_CODE (op0) == BIT_NOT_EXPR
8171 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8172 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8173 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8175 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8176 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8177 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8178 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8179 fold_convert_loc (loc, type, tem));
8182 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8183 type of X and Y (integer types only). */
8184 if (INTEGRAL_TYPE_P (type)
8185 && TREE_CODE (op0) == MULT_EXPR
8186 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8187 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8189 /* Be careful not to introduce new overflows. */
8190 tree mult_type;
8191 if (TYPE_OVERFLOW_WRAPS (type))
8192 mult_type = type;
8193 else
8194 mult_type = unsigned_type_for (type);
8196 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8198 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8199 fold_convert_loc (loc, mult_type,
8200 TREE_OPERAND (op0, 0)),
8201 fold_convert_loc (loc, mult_type,
8202 TREE_OPERAND (op0, 1)));
8203 return fold_convert_loc (loc, type, tem);
8207 tem = fold_convert_const (code, type, op0);
8208 return tem ? tem : NULL_TREE;
8210 case ADDR_SPACE_CONVERT_EXPR:
8211 if (integer_zerop (arg0))
8212 return fold_convert_const (code, type, arg0);
8213 return NULL_TREE;
8215 case FIXED_CONVERT_EXPR:
8216 tem = fold_convert_const (code, type, arg0);
8217 return tem ? tem : NULL_TREE;
8219 case VIEW_CONVERT_EXPR:
8220 if (TREE_TYPE (op0) == type)
8221 return op0;
8222 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8223 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8224 type, TREE_OPERAND (op0, 0));
8225 if (TREE_CODE (op0) == MEM_REF)
8226 return fold_build2_loc (loc, MEM_REF, type,
8227 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8229 /* For integral conversions with the same precision or pointer
8230 conversions use a NOP_EXPR instead. */
8231 if ((INTEGRAL_TYPE_P (type)
8232 || POINTER_TYPE_P (type))
8233 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8234 || POINTER_TYPE_P (TREE_TYPE (op0)))
8235 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8236 return fold_convert_loc (loc, type, op0);
8238 /* Strip inner integral conversions that do not change the precision. */
8239 if (CONVERT_EXPR_P (op0)
8240 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8241 || POINTER_TYPE_P (TREE_TYPE (op0)))
8242 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8243 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8244 && (TYPE_PRECISION (TREE_TYPE (op0))
8245 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8246 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8247 type, TREE_OPERAND (op0, 0));
8249 return fold_view_convert_expr (type, op0);
8251 case NEGATE_EXPR:
8252 tem = fold_negate_expr (loc, arg0);
8253 if (tem)
8254 return fold_convert_loc (loc, type, tem);
8255 return NULL_TREE;
8257 case ABS_EXPR:
8258 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8259 return fold_abs_const (arg0, type);
8260 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8261 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8262 /* Convert fabs((double)float) into (double)fabsf(float). */
8263 else if (TREE_CODE (arg0) == NOP_EXPR
8264 && TREE_CODE (type) == REAL_TYPE)
8266 tree targ0 = strip_float_extensions (arg0);
8267 if (targ0 != arg0)
8268 return fold_convert_loc (loc, type,
8269 fold_build1_loc (loc, ABS_EXPR,
8270 TREE_TYPE (targ0),
8271 targ0));
8273 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8274 else if (TREE_CODE (arg0) == ABS_EXPR)
8275 return arg0;
8276 else if (tree_expr_nonnegative_p (arg0))
8277 return arg0;
8279 /* Strip sign ops from argument. */
8280 if (TREE_CODE (type) == REAL_TYPE)
8282 tem = fold_strip_sign_ops (arg0);
8283 if (tem)
8284 return fold_build1_loc (loc, ABS_EXPR, type,
8285 fold_convert_loc (loc, type, tem));
8287 return NULL_TREE;
8289 case CONJ_EXPR:
8290 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8291 return fold_convert_loc (loc, type, arg0);
8292 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8294 tree itype = TREE_TYPE (type);
8295 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8296 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8297 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8298 negate_expr (ipart));
8300 if (TREE_CODE (arg0) == COMPLEX_CST)
8302 tree itype = TREE_TYPE (type);
8303 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8304 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8305 return build_complex (type, rpart, negate_expr (ipart));
8307 if (TREE_CODE (arg0) == CONJ_EXPR)
8308 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8309 return NULL_TREE;
8311 case BIT_NOT_EXPR:
8312 if (TREE_CODE (arg0) == INTEGER_CST)
8313 return fold_not_const (arg0, type);
8314 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8315 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8316 /* Convert ~ (-A) to A - 1. */
8317 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8318 return fold_build2_loc (loc, MINUS_EXPR, type,
8319 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8320 build_int_cst (type, 1));
8321 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8322 else if (INTEGRAL_TYPE_P (type)
8323 && ((TREE_CODE (arg0) == MINUS_EXPR
8324 && integer_onep (TREE_OPERAND (arg0, 1)))
8325 || (TREE_CODE (arg0) == PLUS_EXPR
8326 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8328 /* Perform the negation in ARG0's type and only then convert
8329 to TYPE so as to avoid introducing undefined behavior. */
8330 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8331 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8332 TREE_OPERAND (arg0, 0));
8333 return fold_convert_loc (loc, type, t);
8335 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8336 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8337 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8338 fold_convert_loc (loc, type,
8339 TREE_OPERAND (arg0, 0)))))
8340 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8341 fold_convert_loc (loc, type,
8342 TREE_OPERAND (arg0, 1)));
8343 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8344 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8345 fold_convert_loc (loc, type,
8346 TREE_OPERAND (arg0, 1)))))
8347 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8348 fold_convert_loc (loc, type,
8349 TREE_OPERAND (arg0, 0)), tem);
8350 /* Perform BIT_NOT_EXPR on each element individually. */
8351 else if (TREE_CODE (arg0) == VECTOR_CST)
8353 tree *elements;
8354 tree elem;
8355 unsigned count = VECTOR_CST_NELTS (arg0), i;
8357 elements = XALLOCAVEC (tree, count);
8358 for (i = 0; i < count; i++)
8360 elem = VECTOR_CST_ELT (arg0, i);
8361 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8362 if (elem == NULL_TREE)
8363 break;
8364 elements[i] = elem;
8366 if (i == count)
8367 return build_vector (type, elements);
8369 else if (COMPARISON_CLASS_P (arg0)
8370 && (VECTOR_TYPE_P (type)
8371 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8373 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8374 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8375 HONOR_NANS (TYPE_MODE (op_type)));
8376 if (subcode != ERROR_MARK)
8377 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8378 TREE_OPERAND (arg0, 1));
8382 return NULL_TREE;
8384 case TRUTH_NOT_EXPR:
8385 /* Note that the operand of this must be an int
8386 and its values must be 0 or 1.
8387 ("true" is a fixed value perhaps depending on the language,
8388 but we don't handle values other than 1 correctly yet.) */
8389 tem = fold_truth_not_expr (loc, arg0);
8390 if (!tem)
8391 return NULL_TREE;
8392 return fold_convert_loc (loc, type, tem);
8394 case REALPART_EXPR:
8395 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8396 return fold_convert_loc (loc, type, arg0);
8397 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8398 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8399 TREE_OPERAND (arg0, 1));
8400 if (TREE_CODE (arg0) == COMPLEX_CST)
8401 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8402 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8404 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8405 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8406 fold_build1_loc (loc, REALPART_EXPR, itype,
8407 TREE_OPERAND (arg0, 0)),
8408 fold_build1_loc (loc, REALPART_EXPR, itype,
8409 TREE_OPERAND (arg0, 1)));
8410 return fold_convert_loc (loc, type, tem);
8412 if (TREE_CODE (arg0) == CONJ_EXPR)
8414 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8415 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8416 TREE_OPERAND (arg0, 0));
8417 return fold_convert_loc (loc, type, tem);
8419 if (TREE_CODE (arg0) == CALL_EXPR)
8421 tree fn = get_callee_fndecl (arg0);
8422 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8423 switch (DECL_FUNCTION_CODE (fn))
8425 CASE_FLT_FN (BUILT_IN_CEXPI):
8426 fn = mathfn_built_in (type, BUILT_IN_COS);
8427 if (fn)
8428 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8429 break;
8431 default:
8432 break;
8435 return NULL_TREE;
8437 case IMAGPART_EXPR:
8438 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8439 return build_zero_cst (type);
8440 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8441 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8442 TREE_OPERAND (arg0, 0));
8443 if (TREE_CODE (arg0) == COMPLEX_CST)
8444 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8445 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8447 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8448 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8449 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8450 TREE_OPERAND (arg0, 0)),
8451 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8452 TREE_OPERAND (arg0, 1)));
8453 return fold_convert_loc (loc, type, tem);
8455 if (TREE_CODE (arg0) == CONJ_EXPR)
8457 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8458 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8459 return fold_convert_loc (loc, type, negate_expr (tem));
8461 if (TREE_CODE (arg0) == CALL_EXPR)
8463 tree fn = get_callee_fndecl (arg0);
8464 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8465 switch (DECL_FUNCTION_CODE (fn))
8467 CASE_FLT_FN (BUILT_IN_CEXPI):
8468 fn = mathfn_built_in (type, BUILT_IN_SIN);
8469 if (fn)
8470 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8471 break;
8473 default:
8474 break;
8477 return NULL_TREE;
8479 case INDIRECT_REF:
8480 /* Fold *&X to X if X is an lvalue. */
8481 if (TREE_CODE (op0) == ADDR_EXPR)
8483 tree op00 = TREE_OPERAND (op0, 0);
8484 if ((TREE_CODE (op00) == VAR_DECL
8485 || TREE_CODE (op00) == PARM_DECL
8486 || TREE_CODE (op00) == RESULT_DECL)
8487 && !TREE_READONLY (op00))
8488 return op00;
8490 return NULL_TREE;
8492 case VEC_UNPACK_LO_EXPR:
8493 case VEC_UNPACK_HI_EXPR:
8494 case VEC_UNPACK_FLOAT_LO_EXPR:
8495 case VEC_UNPACK_FLOAT_HI_EXPR:
8497 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8498 tree *elts;
8499 enum tree_code subcode;
8501 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8502 if (TREE_CODE (arg0) != VECTOR_CST)
8503 return NULL_TREE;
8505 elts = XALLOCAVEC (tree, nelts * 2);
8506 if (!vec_cst_ctor_to_array (arg0, elts))
8507 return NULL_TREE;
8509 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8510 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8511 elts += nelts;
8513 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8514 subcode = NOP_EXPR;
8515 else
8516 subcode = FLOAT_EXPR;
8518 for (i = 0; i < nelts; i++)
8520 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8521 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8522 return NULL_TREE;
8525 return build_vector (type, elts);
8528 case REDUC_MIN_EXPR:
8529 case REDUC_MAX_EXPR:
8530 case REDUC_PLUS_EXPR:
8532 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8533 tree *elts;
8534 enum tree_code subcode;
8536 if (TREE_CODE (op0) != VECTOR_CST)
8537 return NULL_TREE;
8539 elts = XALLOCAVEC (tree, nelts);
8540 if (!vec_cst_ctor_to_array (op0, elts))
8541 return NULL_TREE;
8543 switch (code)
8545 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8546 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8547 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8548 default: gcc_unreachable ();
8551 for (i = 1; i < nelts; i++)
8553 elts[0] = const_binop (subcode, elts[0], elts[i]);
8554 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8555 return NULL_TREE;
8556 elts[i] = build_zero_cst (TREE_TYPE (type));
8559 return build_vector (type, elts);
8562 default:
8563 return NULL_TREE;
8564 } /* switch (code) */
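/* Brute-force check, illustrative only and not part of GCC (guarded
   out), of the "sign-extension of a zero-extended value" rule above:
   the signed intermediate conversion is redundant because the 8-bit
   zero-extension already determines the value.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (int x = -300; x <= 300; x++)
    assert ((int32_t)(int16_t)(uint8_t) x == (int32_t)(uint8_t) x);
  return 0;
}
#endif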
8568 /* If the operation was a conversion do _not_ mark a resulting constant
8569 with TREE_OVERFLOW if the original constant was not. These conversions
8570 have implementation defined behavior and retaining the TREE_OVERFLOW
8571 flag here would confuse later passes such as VRP. */
8572 tree
8573 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8574 tree type, tree op0)
8576 tree res = fold_unary_loc (loc, code, type, op0);
8577 if (res
8578 && TREE_CODE (res) == INTEGER_CST
8579 && TREE_CODE (op0) == INTEGER_CST
8580 && CONVERT_EXPR_CODE_P (code))
8581 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8583 return res;
8586 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8587 operands OP0 and OP1. LOC is the location of the resulting expression.
8588 ARG0 and ARG1 are OP0 and OP1 with conversions stripped (STRIP_NOPS).
8589 Return the folded expression if folding is successful. Otherwise,
8590 return NULL_TREE. */
8591 static tree
8592 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8593 tree arg0, tree arg1, tree op0, tree op1)
8595 tree tem;
8597 /* We only do these simplifications if we are optimizing. */
8598 if (!optimize)
8599 return NULL_TREE;
8601 /* Check for things like (A || B) && (A || C). We can convert this
8602 to A || (B && C). Note that either operator can be any of the four
8603 truth and/or operations and the transformation will still be
8604 valid. Also note that we only care about order for the
8605 ANDIF and ORIF operators. If B contains side effects, this
8606 might change the truth-value of A. */
8607 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8608 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8609 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8610 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8611 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8612 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8614 tree a00 = TREE_OPERAND (arg0, 0);
8615 tree a01 = TREE_OPERAND (arg0, 1);
8616 tree a10 = TREE_OPERAND (arg1, 0);
8617 tree a11 = TREE_OPERAND (arg1, 1);
8618 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8619 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8620 && (code == TRUTH_AND_EXPR
8621 || code == TRUTH_OR_EXPR));
8623 if (operand_equal_p (a00, a10, 0))
8624 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8625 fold_build2_loc (loc, code, type, a01, a11));
8626 else if (commutative && operand_equal_p (a00, a11, 0))
8627 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8628 fold_build2_loc (loc, code, type, a01, a10));
8629 else if (commutative && operand_equal_p (a01, a10, 0))
8630 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8631 fold_build2_loc (loc, code, type, a00, a11));
8633 /* This case is tricky because we must either have commutative
8634 operators or else A10 must not have side-effects. */
8636 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8637 && operand_equal_p (a01, a11, 0))
8638 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8639 fold_build2_loc (loc, code, type, a00, a10),
8640 a01);
8643 /* See if we can build a range comparison. */
8644 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8645 return tem;
8647 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8648 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8650 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8651 if (tem)
8652 return fold_build2_loc (loc, code, type, tem, arg1);
8655 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8656 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8658 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8659 if (tem)
8660 return fold_build2_loc (loc, code, type, arg0, tem);
8663 /* Check for the possibility of merging component references. If our
8664 lhs is another similar operation, try to merge its rhs with our
8665 rhs. Then try to merge our lhs and rhs. */
8666 if (TREE_CODE (arg0) == code
8667 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8668 TREE_OPERAND (arg0, 1), arg1)))
8669 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8671 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8672 return tem;
8674 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8675 && (code == TRUTH_AND_EXPR
8676 || code == TRUTH_ANDIF_EXPR
8677 || code == TRUTH_OR_EXPR
8678 || code == TRUTH_ORIF_EXPR))
8680 enum tree_code ncode, icode;
8682 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8683 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8684 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8686 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8687 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8688 We don't want to pack more than two leaves into a non-IF AND/OR
8689 expression.
8690 If the tree code of the left-hand operand isn't an AND/OR-IF code and is
8691 not equal to IF-CODE, then we don't want to add the right-hand operand.
8692 If the inner right-hand side of the left-hand operand has
8693 side effects, or isn't simple, then we can't add to it,
8694 as otherwise we might destroy the if-sequence. */
8695 if (TREE_CODE (arg0) == icode
8696 && simple_operand_p_2 (arg1)
8697 /* Needed for sequence points to handle trappings, and
8698 side-effects. */
8699 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8701 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8702 arg1);
8703 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8704 tem);
8706 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8707 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8708 else if (TREE_CODE (arg1) == icode
8709 && simple_operand_p_2 (arg0)
8710 /* Needed for sequence points to handle trappings, and
8711 side-effects. */
8712 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8714 tem = fold_build2_loc (loc, ncode, type,
8715 arg0, TREE_OPERAND (arg1, 0));
8716 return fold_build2_loc (loc, icode, type, tem,
8717 TREE_OPERAND (arg1, 1));
8719 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8720 into (A OR B).
8721 For sequence point consistency, we need to check for trapping,
8722 and side-effects. */
8723 else if (code == icode && simple_operand_p_2 (arg0)
8724 && simple_operand_p_2 (arg1))
8725 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8728 return NULL_TREE;
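/* Truth-table check, illustrative only and not part of GCC (guarded
   out), of the distribution identity handled above:
   (A || B) && (A || C) == A || (B && C).  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif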
8731 /* Fold a binary expression of code CODE and type TYPE with operands
8732 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8733 Return the folded expression if folding is successful. Otherwise,
8734 return NULL_TREE. */
8736 static tree
8737 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8739 enum tree_code compl_code;
8741 if (code == MIN_EXPR)
8742 compl_code = MAX_EXPR;
8743 else if (code == MAX_EXPR)
8744 compl_code = MIN_EXPR;
8745 else
8746 gcc_unreachable ();
8748 /* MIN (MAX (a, b), b) == b. */
8749 if (TREE_CODE (op0) == compl_code
8750 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8751 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8753 /* MIN (MAX (b, a), b) == b. */
8754 if (TREE_CODE (op0) == compl_code
8755 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8756 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8757 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8759 /* MIN (a, MAX (a, b)) == a. */
8760 if (TREE_CODE (op1) == compl_code
8761 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8762 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8763 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8765 /* MIN (a, MAX (b, a)) == a. */
8766 if (TREE_CODE (op1) == compl_code
8767 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8768 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8769 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8771 return NULL_TREE;
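/* Brute-force check, illustrative only and not part of GCC (guarded
   out), of two of the MIN/MAX identities folded above.  */
#if 0
#include <assert.h>

#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define MAX(x, y) ((x) > (y) ? (x) : (y))

int
main (void)
{
  for (int a = -4; a <= 4; a++)
    for (int b = -4; b <= 4; b++)
      {
        assert (MIN (MAX (a, b), b) == b);	/* MIN (MAX (a, b), b) == b */
        assert (MIN (a, MAX (a, b)) == a);	/* MIN (a, MAX (a, b)) == a */
      }
  return 0;
}
#endif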
8774 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8775 by changing CODE to reduce the magnitude of constants involved in
8776 ARG0 of the comparison.
8777 Returns a canonicalized comparison tree if a simplification was
8778 possible, otherwise returns NULL_TREE.
8779 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8780 valid if signed overflow is undefined. */
8782 static tree
8783 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8784 tree arg0, tree arg1,
8785 bool *strict_overflow_p)
8787 enum tree_code code0 = TREE_CODE (arg0);
8788 tree t, cst0 = NULL_TREE;
8789 int sgn0;
8790 bool swap = false;
8792 /* Match A +- CST code arg1 and CST code arg1. We can change the
8793 first form only if overflow is undefined. */
8794 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8795 /* In principle pointers also have undefined overflow behavior,
8796 but that causes problems elsewhere. */
8797 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8798 && (code0 == MINUS_EXPR
8799 || code0 == PLUS_EXPR)
8800 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8801 || code0 == INTEGER_CST))
8802 return NULL_TREE;
8804 /* Identify the constant in arg0 and its sign. */
8805 if (code0 == INTEGER_CST)
8806 cst0 = arg0;
8807 else
8808 cst0 = TREE_OPERAND (arg0, 1);
8809 sgn0 = tree_int_cst_sgn (cst0);
8811 /* Overflowed constants and zero will cause problems. */
8812 if (integer_zerop (cst0)
8813 || TREE_OVERFLOW (cst0))
8814 return NULL_TREE;
8816 /* See if we can reduce the magnitude of the constant in
8817 arg0 by changing the comparison code. */
8818 if (code0 == INTEGER_CST)
8820 /* CST <= arg1 -> CST-1 < arg1. */
8821 if (code == LE_EXPR && sgn0 == 1)
8822 code = LT_EXPR;
8823 /* -CST < arg1 -> -CST-1 <= arg1. */
8824 else if (code == LT_EXPR && sgn0 == -1)
8825 code = LE_EXPR;
8826 /* CST > arg1 -> CST-1 >= arg1. */
8827 else if (code == GT_EXPR && sgn0 == 1)
8828 code = GE_EXPR;
8829 /* -CST >= arg1 -> -CST-1 > arg1. */
8830 else if (code == GE_EXPR && sgn0 == -1)
8831 code = GT_EXPR;
8832 else
8833 return NULL_TREE;
8834 /* arg1 code' CST' might be more canonical. */
8835 swap = true;
8837 else
8839 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8840 if (code == LT_EXPR
8841 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8842 code = LE_EXPR;
8843 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8844 else if (code == GT_EXPR
8845 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8846 code = GE_EXPR;
8847 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8848 else if (code == LE_EXPR
8849 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8850 code = LT_EXPR;
8851 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8852 else if (code == GE_EXPR
8853 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8854 code = GT_EXPR;
8855 else
8856 return NULL_TREE;
8857 *strict_overflow_p = true;
8860 /* Now build the constant reduced in magnitude. But not if that
8861 would produce one outside of its type's range. */
8862 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8863 && ((sgn0 == 1
8864 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8865 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8866 || (sgn0 == -1
8867 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8868 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8869 /* We cannot swap the comparison here as that would cause us to
8870 endlessly recurse. */
8871 return NULL_TREE;
8873 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8874 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8875 if (code0 != INTEGER_CST)
8876 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8877 t = fold_convert (TREE_TYPE (arg1), t);
8879 /* If swapping might yield a more canonical form, do so. */
8880 if (swap)
8881 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8882 else
8883 return fold_build2_loc (loc, code, type, t, arg1);
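/* Brute-force check, illustrative only and not part of GCC (guarded
   out), of the magnitude reductions applied above; the A - CST form
   additionally assumes no signed overflow, which these small ranges
   guarantee.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int cst = 1; cst <= 8; cst++)
    for (int x = -16; x <= 16; x++)
      {
        assert ((cst <= x) == (cst - 1 < x));	/* CST <= x -> CST-1 < x */
        assert ((cst > x) == (cst - 1 >= x));	/* CST > x -> CST-1 >= x */
        for (int a = -8; a <= 8; a++)
          /* A - CST < x -> A - (CST-1) <= x.  */
          assert ((a - cst < x) == (a - (cst - 1) <= x));
      }
  return 0;
}
#endif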
8886 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8887 overflow further. Try to decrease the magnitude of constants involved
8888 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8889 and put sole constants at the second argument position.
8890 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8892 static tree
8893 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8894 tree arg0, tree arg1)
8896 tree t;
8897 bool strict_overflow_p;
8898 const char * const warnmsg = G_("assuming signed overflow does not occur "
8899 "when reducing constant in comparison");
8901 /* Try canonicalization by simplifying arg0. */
8902 strict_overflow_p = false;
8903 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8904 &strict_overflow_p);
8905 if (t)
8907 if (strict_overflow_p)
8908 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8909 return t;
8912 /* Try canonicalization by simplifying arg1 using the swapped
8913 comparison. */
8914 code = swap_tree_comparison (code);
8915 strict_overflow_p = false;
8916 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8917 &strict_overflow_p);
8918 if (t && strict_overflow_p)
8919 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8920 return t;
8923 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8924 space. This is used to avoid issuing overflow warnings for
8925 expressions like &p->x, which cannot wrap. */
8927 static bool
8928 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8930 double_int di_offset, total;
8932 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8933 return true;
8935 if (bitpos < 0)
8936 return true;
8938 if (offset == NULL_TREE)
8939 di_offset = double_int_zero;
8940 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8941 return true;
8942 else
8943 di_offset = TREE_INT_CST (offset);
8945 bool overflow;
8946 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8947 total = di_offset.add_with_sign (units, true, &overflow);
8948 if (overflow)
8949 return true;
8951 if (total.high != 0)
8952 return true;
8954 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8955 if (size <= 0)
8956 return true;
8958 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8959 array. */
8960 if (TREE_CODE (base) == ADDR_EXPR)
8962 HOST_WIDE_INT base_size;
8964 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8965 if (base_size > 0 && size < base_size)
8966 size = base_size;
8969 return total.low > (unsigned HOST_WIDE_INT) size;
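/* Illustrative example (added commentary; the array and its size are
   assumptions): for char buf[8], a decomposed access with base &buf
   and a constant byte offset of 4 cannot wrap (4 <= 8, the object
   size), so this returns false; an offset of 100 would step past the
   object, and the function conservatively returns true. */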
8972 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8973 kind INTEGER_CST. This makes sure to properly sign-extend the
8974 constant. */
8976 static HOST_WIDE_INT
8977 size_low_cst (const_tree t)
8979 double_int d = tree_to_double_int (t);
8980 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8983 /* Subroutine of fold_binary. This routine performs all of the
8984 transformations that are common to the equality/inequality
8985 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8986 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8987 fold_binary itself should call fold_binary instead of this routine.
8988 Fold a comparison with tree code CODE and type TYPE with operands
8989 OP0 and OP1. Return the folded comparison or NULL_TREE. */
8991 static tree
8992 fold_comparison (location_t loc, enum tree_code code, tree type,
8993 tree op0, tree op1)
8995 tree arg0, arg1, tem;
8997 arg0 = op0;
8998 arg1 = op1;
9000 STRIP_SIGN_NOPS (arg0);
9001 STRIP_SIGN_NOPS (arg1);
9003 tem = fold_relational_const (code, type, arg0, arg1);
9004 if (tem != NULL_TREE)
9005 return tem;
9007 /* If one arg is a real or integer constant, put it last. */
9008 if (tree_swap_operands_p (arg0, arg1, true))
9009 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9011 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9012 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9013 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9014 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9015 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9016 && (TREE_CODE (arg1) == INTEGER_CST
9017 && !TREE_OVERFLOW (arg1)))
9019 tree const1 = TREE_OPERAND (arg0, 1);
9020 tree const2 = arg1;
9021 tree variable = TREE_OPERAND (arg0, 0);
9022 tree lhs;
9023 int lhs_add;
9024 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9026 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9027 TREE_TYPE (arg1), const2, const1);
9029 /* If the constant operation overflowed, this can be
9030 simplified as a comparison against INT_MAX/INT_MIN. */
9031 if (TREE_CODE (lhs) == INTEGER_CST
9032 && TREE_OVERFLOW (lhs)
9033 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9035 int const1_sgn = tree_int_cst_sgn (const1);
9036 enum tree_code code2 = code;
9038 /* Get the sign of the constant on the lhs if the
9039 operation were VARIABLE + CONST1. */
9040 if (TREE_CODE (arg0) == MINUS_EXPR)
9041 const1_sgn = -const1_sgn;
9043 /* The sign of the constant determines if we overflowed
9044 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9045 Canonicalize to the INT_MIN overflow by swapping the comparison
9046 if necessary. */
9047 if (const1_sgn == -1)
9048 code2 = swap_tree_comparison (code);
9050 /* We now can look at the canonicalized case
9051 VARIABLE + 1 CODE2 INT_MIN
9052 and decide on the result. */
9053 if (code2 == LT_EXPR
9054 || code2 == LE_EXPR
9055 || code2 == EQ_EXPR)
9056 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9057 else if (code2 == NE_EXPR
9058 || code2 == GE_EXPR
9059 || code2 == GT_EXPR)
9060 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9063 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9064 && (TREE_CODE (lhs) != INTEGER_CST
9065 || !TREE_OVERFLOW (lhs)))
9067 if (code != EQ_EXPR && code != NE_EXPR)
9068 fold_overflow_warning ("assuming signed overflow does not occur "
9069 "when changing X +- C1 cmp C2 to "
9070 "X cmp C1 +- C2",
9071 WARN_STRICT_OVERFLOW_COMPARISON);
9072 return fold_build2_loc (loc, code, type, variable, lhs);
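/* Worked example (added commentary): for signed x,
     x + 10 < 5 becomes x < -5
   and when the adjusted constant overflows, the result is decided
   outright: x + 1 <= INT_MIN folds to false, since x + 1, when it
   does not overflow, is always at least INT_MIN + 1. */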
9076 /* For comparisons of pointers we can decompose them into a compile-time
9077 comparison of the base objects and the offsets into the object.
9078 This requires at least one operand being an ADDR_EXPR or a
9079 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9080 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9081 && (TREE_CODE (arg0) == ADDR_EXPR
9082 || TREE_CODE (arg1) == ADDR_EXPR
9083 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9084 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9086 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9087 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9088 enum machine_mode mode;
9089 int volatilep, unsignedp;
9090 bool indirect_base0 = false, indirect_base1 = false;
9092 /* Get base and offset for the access. Strip ADDR_EXPR for
9093 get_inner_reference, but put it back by stripping INDIRECT_REF
9094 off the base object if possible. indirect_baseN will be true
9095 if baseN is not an address but refers to the object itself. */
9096 base0 = arg0;
9097 if (TREE_CODE (arg0) == ADDR_EXPR)
9099 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9100 &bitsize, &bitpos0, &offset0, &mode,
9101 &unsignedp, &volatilep, false);
9102 if (TREE_CODE (base0) == INDIRECT_REF)
9103 base0 = TREE_OPERAND (base0, 0);
9104 else
9105 indirect_base0 = true;
9107 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9109 base0 = TREE_OPERAND (arg0, 0);
9110 STRIP_SIGN_NOPS (base0);
9111 if (TREE_CODE (base0) == ADDR_EXPR)
9113 base0 = TREE_OPERAND (base0, 0);
9114 indirect_base0 = true;
9116 offset0 = TREE_OPERAND (arg0, 1);
9117 if (tree_fits_shwi_p (offset0))
9119 HOST_WIDE_INT off = size_low_cst (offset0);
9120 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9121 * BITS_PER_UNIT)
9122 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9124 bitpos0 = off * BITS_PER_UNIT;
9125 offset0 = NULL_TREE;
9130 base1 = arg1;
9131 if (TREE_CODE (arg1) == ADDR_EXPR)
9133 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9134 &bitsize, &bitpos1, &offset1, &mode,
9135 &unsignedp, &volatilep, false);
9136 if (TREE_CODE (base1) == INDIRECT_REF)
9137 base1 = TREE_OPERAND (base1, 0);
9138 else
9139 indirect_base1 = true;
9141 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9143 base1 = TREE_OPERAND (arg1, 0);
9144 STRIP_SIGN_NOPS (base1);
9145 if (TREE_CODE (base1) == ADDR_EXPR)
9147 base1 = TREE_OPERAND (base1, 0);
9148 indirect_base1 = true;
9150 offset1 = TREE_OPERAND (arg1, 1);
9151 if (tree_fits_shwi_p (offset1))
9153 HOST_WIDE_INT off = size_low_cst (offset1);
9154 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9155 * BITS_PER_UNIT)
9156 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9158 bitpos1 = off * BITS_PER_UNIT;
9159 offset1 = NULL_TREE;
9164 /* A local variable can never be pointed to by
9165 the default SSA name of an incoming parameter. */
9166 if ((TREE_CODE (arg0) == ADDR_EXPR
9167 && indirect_base0
9168 && TREE_CODE (base0) == VAR_DECL
9169 && auto_var_in_fn_p (base0, current_function_decl)
9170 && !indirect_base1
9171 && TREE_CODE (base1) == SSA_NAME
9172 && SSA_NAME_IS_DEFAULT_DEF (base1)
9173 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9174 || (TREE_CODE (arg1) == ADDR_EXPR
9175 && indirect_base1
9176 && TREE_CODE (base1) == VAR_DECL
9177 && auto_var_in_fn_p (base1, current_function_decl)
9178 && !indirect_base0
9179 && TREE_CODE (base0) == SSA_NAME
9180 && SSA_NAME_IS_DEFAULT_DEF (base0)
9181 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9183 if (code == NE_EXPR)
9184 return constant_boolean_node (1, type);
9185 else if (code == EQ_EXPR)
9186 return constant_boolean_node (0, type);
9188 /* If we have equivalent bases we might be able to simplify. */
9189 else if (indirect_base0 == indirect_base1
9190 && operand_equal_p (base0, base1, 0))
9192 /* We can fold this expression to a constant if the non-constant
9193 offset parts are equal. */
9194 if ((offset0 == offset1
9195 || (offset0 && offset1
9196 && operand_equal_p (offset0, offset1, 0)))
9197 && (code == EQ_EXPR
9198 || code == NE_EXPR
9199 || (indirect_base0 && DECL_P (base0))
9200 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9203 if (code != EQ_EXPR
9204 && code != NE_EXPR
9205 && bitpos0 != bitpos1
9206 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9207 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9208 fold_overflow_warning (("assuming pointer wraparound does not "
9209 "occur when comparing P +- C1 with "
9210 "P +- C2"),
9211 WARN_STRICT_OVERFLOW_CONDITIONAL);
9213 switch (code)
9215 case EQ_EXPR:
9216 return constant_boolean_node (bitpos0 == bitpos1, type);
9217 case NE_EXPR:
9218 return constant_boolean_node (bitpos0 != bitpos1, type);
9219 case LT_EXPR:
9220 return constant_boolean_node (bitpos0 < bitpos1, type);
9221 case LE_EXPR:
9222 return constant_boolean_node (bitpos0 <= bitpos1, type);
9223 case GE_EXPR:
9224 return constant_boolean_node (bitpos0 >= bitpos1, type);
9225 case GT_EXPR:
9226 return constant_boolean_node (bitpos0 > bitpos1, type);
9227 default:;
9230 /* We can simplify the comparison to a comparison of the variable
9231 offset parts if the constant offset parts are equal.
9232 Be careful to use signed sizetype here because otherwise we
9233 mess with array offsets in the wrong way. This is possible
9234 because pointer arithmetic is restricted to remain within an
9235 object and overflow on pointer differences is undefined as of
9236 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9237 else if (bitpos0 == bitpos1
9238 && ((code == EQ_EXPR || code == NE_EXPR)
9239 || (indirect_base0 && DECL_P (base0))
9240 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9242 /* By converting to signed sizetype we cover middle-end pointer
9243 arithmetic which operates on unsigned pointer types of size
9244 type size and ARRAY_REF offsets which are properly sign or
9245 zero extended from their type in case it is narrower than
9246 sizetype. */
9247 if (offset0 == NULL_TREE)
9248 offset0 = build_int_cst (ssizetype, 0);
9249 else
9250 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9251 if (offset1 == NULL_TREE)
9252 offset1 = build_int_cst (ssizetype, 0);
9253 else
9254 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9256 if (code != EQ_EXPR
9257 && code != NE_EXPR
9258 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9259 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9260 fold_overflow_warning (("assuming pointer wraparound does not "
9261 "occur when comparing P +- C1 with "
9262 "P +- C2"),
9263 WARN_STRICT_OVERFLOW_COMPARISON);
9265 return fold_build2_loc (loc, code, type, offset0, offset1);
9268 /* For non-equal bases we can simplify if they are addresses
9269 of local binding decls or constants. */
9270 else if (indirect_base0 && indirect_base1
9271 /* We know that !operand_equal_p (base0, base1, 0)
9272 because the if condition was false. But make
9273 sure two decls are not the same. */
9274 && base0 != base1
9275 && TREE_CODE (arg0) == ADDR_EXPR
9276 && TREE_CODE (arg1) == ADDR_EXPR
9277 && (((TREE_CODE (base0) == VAR_DECL
9278 || TREE_CODE (base0) == PARM_DECL)
9279 && (targetm.binds_local_p (base0)
9280 || CONSTANT_CLASS_P (base1)))
9281 || CONSTANT_CLASS_P (base0))
9282 && (((TREE_CODE (base1) == VAR_DECL
9283 || TREE_CODE (base1) == PARM_DECL)
9284 && (targetm.binds_local_p (base1)
9285 || CONSTANT_CLASS_P (base0)))
9286 || CONSTANT_CLASS_P (base1)))
9288 if (code == EQ_EXPR)
9289 return omit_two_operands_loc (loc, type, boolean_false_node,
9290 arg0, arg1);
9291 else if (code == NE_EXPR)
9292 return omit_two_operands_loc (loc, type, boolean_true_node,
9293 arg0, arg1);
9295 /* For equal offsets we can simplify to a comparison of the
9296 base addresses. */
9297 else if (bitpos0 == bitpos1
9298 && (indirect_base0
9299 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9300 && (indirect_base1
9301 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9302 && ((offset0 == offset1)
9303 || (offset0 && offset1
9304 && operand_equal_p (offset0, offset1, 0))))
9306 if (indirect_base0)
9307 base0 = build_fold_addr_expr_loc (loc, base0);
9308 if (indirect_base1)
9309 base1 = build_fold_addr_expr_loc (loc, base1);
9310 return fold_build2_loc (loc, code, type, base0, base1);
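/* Illustrative examples (added commentary; names are hypothetical):
   for a local array a and a pointer p, the decomposition above lets
     &a[2] < &a[5] fold to 1 (equal bases, constant offsets),
     p + i == p + j fold to a comparison of the offsets i and j,
     &a == &b fold to 0 for distinct locally bound decls a and b. */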
9314 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9315 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9316 the resulting offset is smaller in absolute value than the
9317 original one and has the same sign. */
9318 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9319 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9320 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9321 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9322 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9323 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9324 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9326 tree const1 = TREE_OPERAND (arg0, 1);
9327 tree const2 = TREE_OPERAND (arg1, 1);
9328 tree variable1 = TREE_OPERAND (arg0, 0);
9329 tree variable2 = TREE_OPERAND (arg1, 0);
9330 tree cst;
9331 const char * const warnmsg = G_("assuming signed overflow does not "
9332 "occur when combining constants around "
9333 "a comparison");
9335 /* Put the constant on the side where it doesn't overflow and is
9336 of lower absolute value and of the same sign as before. */
9337 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9338 ? MINUS_EXPR : PLUS_EXPR,
9339 const2, const1);
9340 if (!TREE_OVERFLOW (cst)
9341 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9342 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9344 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9345 return fold_build2_loc (loc, code, type,
9346 variable1,
9347 fold_build2_loc (loc, TREE_CODE (arg1),
9348 TREE_TYPE (arg1),
9349 variable2, cst));
9352 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9353 ? MINUS_EXPR : PLUS_EXPR,
9354 const1, const2);
9355 if (!TREE_OVERFLOW (cst)
9356 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9357 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9359 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9360 return fold_build2_loc (loc, code, type,
9361 fold_build2_loc (loc, TREE_CODE (arg0),
9362 TREE_TYPE (arg0),
9363 variable1, cst),
9364 variable2);
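/* Worked example (added commentary): for signed x and y,
     x + 5 < y + 2 becomes x + 3 < y
   since 3 is smaller in magnitude than 5 and has the same sign,
   the rewrite cannot introduce a new overflow. */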
9368 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9369 signed arithmetic case. That form is created by the compiler
9370 often enough for folding it to be of value. One example is in
9371 computing loop trip counts after Operator Strength Reduction. */
9372 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9373 && TREE_CODE (arg0) == MULT_EXPR
9374 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9375 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9376 && integer_zerop (arg1))
9378 tree const1 = TREE_OPERAND (arg0, 1);
9379 tree const2 = arg1; /* zero */
9380 tree variable1 = TREE_OPERAND (arg0, 0);
9381 enum tree_code cmp_code = code;
9383 /* Handle unfolded multiplication by zero. */
9384 if (integer_zerop (const1))
9385 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9387 fold_overflow_warning (("assuming signed overflow does not occur when "
9388 "eliminating multiplication in comparison "
9389 "with zero"),
9390 WARN_STRICT_OVERFLOW_COMPARISON);
9392 /* If const1 is negative we swap the sense of the comparison. */
9393 if (tree_int_cst_sgn (const1) < 0)
9394 cmp_code = swap_tree_comparison (cmp_code);
9396 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
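/* Illustrative examples (added commentary): for signed x,
     x * 4 > 0 becomes x > 0
     x * -2 > 0 becomes x < 0
   where the negative factor swaps the sense of the comparison. */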
9399 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9400 if (tem)
9401 return tem;
9403 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9405 tree targ0 = strip_float_extensions (arg0);
9406 tree targ1 = strip_float_extensions (arg1);
9407 tree newtype = TREE_TYPE (targ0);
9409 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9410 newtype = TREE_TYPE (targ1);
9412 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9413 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9414 return fold_build2_loc (loc, code, type,
9415 fold_convert_loc (loc, newtype, targ0),
9416 fold_convert_loc (loc, newtype, targ1));
9418 /* (-a) CMP (-b) -> b CMP a */
9419 if (TREE_CODE (arg0) == NEGATE_EXPR
9420 && TREE_CODE (arg1) == NEGATE_EXPR)
9421 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9422 TREE_OPERAND (arg0, 0));
9424 if (TREE_CODE (arg1) == REAL_CST)
9426 REAL_VALUE_TYPE cst;
9427 cst = TREE_REAL_CST (arg1);
9429 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9430 if (TREE_CODE (arg0) == NEGATE_EXPR)
9431 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9432 TREE_OPERAND (arg0, 0),
9433 build_real (TREE_TYPE (arg1),
9434 real_value_negate (&cst)));
9436 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9437 /* a CMP (-0) -> a CMP 0 */
9438 if (REAL_VALUE_MINUS_ZERO (cst))
9439 return fold_build2_loc (loc, code, type, arg0,
9440 build_real (TREE_TYPE (arg1), dconst0));
9442 /* x != NaN is always true, other ops are always false. */
9443 if (REAL_VALUE_ISNAN (cst)
9444 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9446 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9447 return omit_one_operand_loc (loc, type, tem, arg0);
9450 /* Fold comparisons against infinity. */
9451 if (REAL_VALUE_ISINF (cst)
9452 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9454 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9455 if (tem != NULL_TREE)
9456 return tem;
9460 /* If this is a comparison of a real constant with a PLUS_EXPR
9461 or a MINUS_EXPR of a real constant, we can convert it into a
9462 comparison with a revised real constant as long as no overflow
9463 occurs when unsafe_math_optimizations are enabled. */
9464 if (flag_unsafe_math_optimizations
9465 && TREE_CODE (arg1) == REAL_CST
9466 && (TREE_CODE (arg0) == PLUS_EXPR
9467 || TREE_CODE (arg0) == MINUS_EXPR)
9468 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9469 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9470 ? MINUS_EXPR : PLUS_EXPR,
9471 arg1, TREE_OPERAND (arg0, 1)))
9472 && !TREE_OVERFLOW (tem))
9473 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9475 /* Likewise, we can simplify a comparison of a real constant with
9476 a MINUS_EXPR whose first operand is also a real constant, i.e.
9477 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9478 floating-point types only if -fassociative-math is set. */
9479 if (flag_associative_math
9480 && TREE_CODE (arg1) == REAL_CST
9481 && TREE_CODE (arg0) == MINUS_EXPR
9482 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9483 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9484 arg1))
9485 && !TREE_OVERFLOW (tem))
9486 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9487 TREE_OPERAND (arg0, 1), tem);
9489 /* Fold comparisons against built-in math functions. */
9490 if (TREE_CODE (arg1) == REAL_CST
9491 && flag_unsafe_math_optimizations
9492 && ! flag_errno_math)
9494 enum built_in_function fcode = builtin_mathfn_code (arg0);
9496 if (fcode != END_BUILTINS)
9498 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9499 if (tem != NULL_TREE)
9500 return tem;
9505 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9506 && CONVERT_EXPR_P (arg0))
9508 /* If we are widening one operand of an integer comparison,
9509 see if the other operand is similarly being widened. Perhaps we
9510 can do the comparison in the narrower type. */
9511 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9512 if (tem)
9513 return tem;
9515 /* Or if we are changing signedness. */
9516 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9517 if (tem)
9518 return tem;
9521 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9522 constant, we can simplify it. */
9523 if (TREE_CODE (arg1) == INTEGER_CST
9524 && (TREE_CODE (arg0) == MIN_EXPR
9525 || TREE_CODE (arg0) == MAX_EXPR)
9526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9528 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9529 if (tem)
9530 return tem;
9533 /* Simplify comparison of something with itself. (For IEEE
9534 floating-point, we can only do some of these simplifications.) */
9535 if (operand_equal_p (arg0, arg1, 0))
9537 switch (code)
9539 case EQ_EXPR:
9540 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9541 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9542 return constant_boolean_node (1, type);
9543 break;
9545 case GE_EXPR:
9546 case LE_EXPR:
9547 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9548 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9549 return constant_boolean_node (1, type);
9550 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9552 case NE_EXPR:
9553 /* For NE, we can only do this simplification if the type is
9554 integral or we don't honor IEEE floating point NaNs. */
9555 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9556 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9557 break;
9558 /* ... fall through ... */
9559 case GT_EXPR:
9560 case LT_EXPR:
9561 return constant_boolean_node (0, type);
9562 default:
9563 gcc_unreachable ();
9567 /* If we are comparing an expression that just has comparisons
9568 of two integer values, arithmetic expressions of those comparisons,
9569 and constants, we can simplify it. There are only three cases
9570 to check: the two values can either be equal, the first can be
9571 greater, or the second can be greater. Fold the expression for
9572 those three values. Since each value must be 0 or 1, we have
9573 eight possibilities, each of which corresponds to the constant 0
9574 or 1 or one of the six possible comparisons.
9576 This handles common cases like (a > b) == 0 but also handles
9577 expressions like ((x > y) - (y > x)) > 0, which supposedly
9578 occur in macroized code. */
9580 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9582 tree cval1 = 0, cval2 = 0;
9583 int save_p = 0;
9585 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9586 /* Don't handle degenerate cases here; they should already
9587 have been handled anyway. */
9588 && cval1 != 0 && cval2 != 0
9589 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9590 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9591 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9592 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9593 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9594 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9595 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9597 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9598 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9600 /* We can't just pass T to eval_subst in case cval1 or cval2
9601 was the same as ARG1. */
9603 tree high_result
9604 = fold_build2_loc (loc, code, type,
9605 eval_subst (loc, arg0, cval1, maxval,
9606 cval2, minval),
9607 arg1);
9608 tree equal_result
9609 = fold_build2_loc (loc, code, type,
9610 eval_subst (loc, arg0, cval1, maxval,
9611 cval2, maxval),
9612 arg1);
9613 tree low_result
9614 = fold_build2_loc (loc, code, type,
9615 eval_subst (loc, arg0, cval1, minval,
9616 cval2, maxval),
9617 arg1);
9619 /* All three of these results should be 0 or 1. Confirm they are.
9620 Then use those values to select the proper code to use. */
9622 if (TREE_CODE (high_result) == INTEGER_CST
9623 && TREE_CODE (equal_result) == INTEGER_CST
9624 && TREE_CODE (low_result) == INTEGER_CST)
9626 /* Make a 3-bit mask with the high-order bit being the
9627 value for `>', the next for '=', and the low for '<'. */
9628 switch ((integer_onep (high_result) * 4)
9629 + (integer_onep (equal_result) * 2)
9630 + integer_onep (low_result))
9632 case 0:
9633 /* Always false. */
9634 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9635 case 1:
9636 code = LT_EXPR;
9637 break;
9638 case 2:
9639 code = EQ_EXPR;
9640 break;
9641 case 3:
9642 code = LE_EXPR;
9643 break;
9644 case 4:
9645 code = GT_EXPR;
9646 break;
9647 case 5:
9648 code = NE_EXPR;
9649 break;
9650 case 6:
9651 code = GE_EXPR;
9652 break;
9653 case 7:
9654 /* Always true. */
9655 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9658 if (save_p)
9660 tem = save_expr (build2 (code, type, cval1, cval2));
9661 SET_EXPR_LOCATION (tem, loc);
9662 return tem;
9664 return fold_build2_loc (loc, code, type, cval1, cval2);
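/* Worked example (added commentary): for (a > b) == 0 the three
   trial evaluations (a greater, a equal, a less) yield 0, 1 and 1,
   i.e. mask 3, so the whole expression folds to a <= b. */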
9669 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9670 into a single range test. */
9671 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9672 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9673 && TREE_CODE (arg1) == INTEGER_CST
9674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9675 && !integer_zerop (TREE_OPERAND (arg0, 1))
9676 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9677 && !TREE_OVERFLOW (arg1))
9679 tem = fold_div_compare (loc, code, type, arg0, arg1);
9680 if (tem != NULL_TREE)
9681 return tem;
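/* Illustrative example (added commentary): for unsigned x,
     x / 4 == 2
   is equivalent to the range test 8 <= x && x <= 11;
   fold_div_compare builds a test of that shape. */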
9684 /* Fold ~X op ~Y as Y op X. */
9685 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9686 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9688 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9689 return fold_build2_loc (loc, code, type,
9690 fold_convert_loc (loc, cmp_type,
9691 TREE_OPERAND (arg1, 0)),
9692 TREE_OPERAND (arg0, 0));
9695 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9696 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9697 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9699 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9700 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9701 TREE_OPERAND (arg0, 0),
9702 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9703 fold_convert_loc (loc, cmp_type, arg1)));
9706 return NULL_TREE;
9710 /* Subroutine of fold_binary. Optimize complex multiplications of the
9711 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9712 argument EXPR represents the expression "z" of type TYPE. */
9714 static tree
9715 fold_mult_zconjz (location_t loc, tree type, tree expr)
9717 tree itype = TREE_TYPE (type);
9718 tree rpart, ipart, tem;
9720 if (TREE_CODE (expr) == COMPLEX_EXPR)
9722 rpart = TREE_OPERAND (expr, 0);
9723 ipart = TREE_OPERAND (expr, 1);
9725 else if (TREE_CODE (expr) == COMPLEX_CST)
9727 rpart = TREE_REALPART (expr);
9728 ipart = TREE_IMAGPART (expr);
9730 else
9732 expr = save_expr (expr);
9733 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9734 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9737 rpart = save_expr (rpart);
9738 ipart = save_expr (ipart);
9739 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9740 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9741 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9742 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9743 build_zero_cst (itype));
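/* The identity being exploited (added commentary): for z = a + b*i,
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b
   so the imaginary part of the result is known to be zero. */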
9747 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9748 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9749 guarantees that P and N have the same least significant log2(M) bits.
9750 N is not otherwise constrained. In particular, N is not normalized to
9751 0 <= N < M as is common. In general, the precise value of P is unknown.
9752 M is chosen as large as possible such that constant N can be determined.
9754 Returns M and sets *RESIDUE to N.
9756 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9757 account. This is not always possible due to PR 35705.
9760 static unsigned HOST_WIDE_INT
9761 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9762 bool allow_func_align)
9764 enum tree_code code;
9766 *residue = 0;
9768 code = TREE_CODE (expr);
9769 if (code == ADDR_EXPR)
9771 unsigned int bitalign;
9772 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9773 *residue /= BITS_PER_UNIT;
9774 return bitalign / BITS_PER_UNIT;
9776 else if (code == POINTER_PLUS_EXPR)
9778 tree op0, op1;
9779 unsigned HOST_WIDE_INT modulus;
9780 enum tree_code inner_code;
9782 op0 = TREE_OPERAND (expr, 0);
9783 STRIP_NOPS (op0);
9784 modulus = get_pointer_modulus_and_residue (op0, residue,
9785 allow_func_align);
9787 op1 = TREE_OPERAND (expr, 1);
9788 STRIP_NOPS (op1);
9789 inner_code = TREE_CODE (op1);
9790 if (inner_code == INTEGER_CST)
9792 *residue += TREE_INT_CST_LOW (op1);
9793 return modulus;
9795 else if (inner_code == MULT_EXPR)
9797 op1 = TREE_OPERAND (op1, 1);
9798 if (TREE_CODE (op1) == INTEGER_CST)
9800 unsigned HOST_WIDE_INT align;
9802 /* Compute the greatest power-of-2 divisor of op1. */
9803 align = TREE_INT_CST_LOW (op1);
9804 align &= -align;
9806 /* If align is non-zero and less than MODULUS, replace
9807 MODULUS with align. If align is 0, then either op1 is 0
9808 or the greatest power-of-2 divisor of op1 doesn't fit in an
9809 unsigned HOST_WIDE_INT. In either case, no additional
9810 constraint is imposed. */
9811 if (align)
9812 modulus = MIN (modulus, align);
9814 return modulus;
9819 /* If we get here, we were unable to determine anything useful about the
9820 expression. */
9821 return 1;
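/* Illustrative example (added commentary; the 16-byte alignment is an
   assumption): if buf is known to be 16-byte aligned, then for
   &buf[0] + 5 this returns modulus 16 with *RESIDUE set to 5, i.e.
   any runtime value P of the pointer satisfies P % 16 == 5. */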
9824 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9825 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9827 static bool
9828 vec_cst_ctor_to_array (tree arg, tree *elts)
9830 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9832 if (TREE_CODE (arg) == VECTOR_CST)
9834 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9835 elts[i] = VECTOR_CST_ELT (arg, i);
9837 else if (TREE_CODE (arg) == CONSTRUCTOR)
9839 constructor_elt *elt;
9841 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9842 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9843 return false;
9844 else
9845 elts[i] = elt->value;
9847 else
9848 return false;
9849 for (; i < nelts; i++)
9850 elts[i]
9851 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9852 return true;
9855 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9856 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9857 NULL_TREE otherwise. */
9859 static tree
9860 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9862 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9863 tree *elts;
9864 bool need_ctor = false;
9866 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9867 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9868 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9869 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9870 return NULL_TREE;
9872 elts = XALLOCAVEC (tree, nelts * 3);
9873 if (!vec_cst_ctor_to_array (arg0, elts)
9874 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9875 return NULL_TREE;
9877 for (i = 0; i < nelts; i++)
9879 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9880 need_ctor = true;
9881 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9884 if (need_ctor)
9886 vec<constructor_elt, va_gc> *v;
9887 vec_alloc (v, nelts);
9888 for (i = 0; i < nelts; i++)
9889 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9890 return build_constructor (type, v);
9892 else
9893 return build_vector (type, &elts[2 * nelts]);
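/* Worked example (added commentary): with four-element vectors
     arg0 = { 0, 1, 2, 3 }, arg1 = { 4, 5, 6, 7 }, sel = { 0, 4, 1, 5 }
   the selector indexes the 2*nelts concatenated elements, so the
   folded result is { 0, 4, 1, 5 }. */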
9896 /* Try to fold a pointer difference of type TYPE between two address expressions of
9897 array references AREF0 and AREF1 using location LOC. Return a
9898 simplified expression for the difference or NULL_TREE. */
9900 static tree
9901 fold_addr_of_array_ref_difference (location_t loc, tree type,
9902 tree aref0, tree aref1)
9904 tree base0 = TREE_OPERAND (aref0, 0);
9905 tree base1 = TREE_OPERAND (aref1, 0);
9906 tree base_offset = build_int_cst (type, 0);
9908 /* If the bases are array references as well, recurse. If the bases
9909 are pointer indirections compute the difference of the pointers.
9910 If the bases are equal, we are set. */
9911 if ((TREE_CODE (base0) == ARRAY_REF
9912 && TREE_CODE (base1) == ARRAY_REF
9913 && (base_offset
9914 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9915 || (INDIRECT_REF_P (base0)
9916 && INDIRECT_REF_P (base1)
9917 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9918 TREE_OPERAND (base0, 0),
9919 TREE_OPERAND (base1, 0))))
9920 || operand_equal_p (base0, base1, 0))
9922 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9923 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9924 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9925 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9926 return fold_build2_loc (loc, PLUS_EXPR, type,
9927 base_offset,
9928 fold_build2_loc (loc, MULT_EXPR, type,
9929 diff, esz));
9931 return NULL_TREE;
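/* Worked example (added commentary): for int a[10], the address
   difference &a[i] - &a[j] folds to (i - j) * sizeof (int): the
   bases are equal, so only the index difference scaled by the
   element size remains. */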
9934 /* If the real or vector real constant CST of type TYPE has an exact
9935 inverse, return it, else return NULL. */
9937 static tree
9938 exact_inverse (tree type, tree cst)
9940 REAL_VALUE_TYPE r;
9941 tree unit_type, *elts;
9942 enum machine_mode mode;
9943 unsigned vec_nelts, i;
9945 switch (TREE_CODE (cst))
9947 case REAL_CST:
9948 r = TREE_REAL_CST (cst);
9950 if (exact_real_inverse (TYPE_MODE (type), &r))
9951 return build_real (type, r);
9953 return NULL_TREE;
9955 case VECTOR_CST:
9956 vec_nelts = VECTOR_CST_NELTS (cst);
9957 elts = XALLOCAVEC (tree, vec_nelts);
9958 unit_type = TREE_TYPE (type);
9959 mode = TYPE_MODE (unit_type);
9961 for (i = 0; i < vec_nelts; i++)
9963 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9964 if (!exact_real_inverse (mode, &r))
9965 return NULL_TREE;
9966 elts[i] = build_real (unit_type, r);
9969 return build_vector (type, elts);
9971 default:
9972 return NULL_TREE;
9976 /* Mask out the tz least significant bits of X of type TYPE where
9977 tz is the number of trailing zeroes in Y. */
9978 static double_int
9979 mask_with_tz (tree type, double_int x, double_int y)
9981 int tz = y.trailing_zeros ();
9983 if (tz > 0)
9985 double_int mask;
9987 mask = ~double_int::mask (tz);
9988 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9989 return mask & x;
9991 return x;
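/* Worked example (added commentary): if Y is 8 (binary 1000, three
   trailing zeros), the mask is ~7, so X = 0b10111 yields 0b10000:
   the three least significant bits of X are cleared. */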
9994 /* Return true when T is an address and is known to be nonzero.
9995 For floating point we further ensure that T is not denormal.
9996 Similar logic is present in nonzero_address in rtlanal.c.
9998 If the return value is based on the assumption that signed overflow
9999 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10000 change *STRICT_OVERFLOW_P. */
10002 static bool
10003 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10005 tree type = TREE_TYPE (t);
10006 enum tree_code code;
10008 /* Doing something useful for floating point would need more work. */
10009 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10010 return false;
10012 code = TREE_CODE (t);
10013 switch (TREE_CODE_CLASS (code))
10015 case tcc_unary:
10016 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10017 strict_overflow_p);
10018 case tcc_binary:
10019 case tcc_comparison:
10020 return tree_binary_nonzero_warnv_p (code, type,
10021 TREE_OPERAND (t, 0),
10022 TREE_OPERAND (t, 1),
10023 strict_overflow_p);
10024 case tcc_constant:
10025 case tcc_declaration:
10026 case tcc_reference:
10027 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10029 default:
10030 break;
10033 switch (code)
10035 case TRUTH_NOT_EXPR:
10036 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10037 strict_overflow_p);
10039 case TRUTH_AND_EXPR:
10040 case TRUTH_OR_EXPR:
10041 case TRUTH_XOR_EXPR:
10042 return tree_binary_nonzero_warnv_p (code, type,
10043 TREE_OPERAND (t, 0),
10044 TREE_OPERAND (t, 1),
10045 strict_overflow_p);
10047 case COND_EXPR:
10048 case CONSTRUCTOR:
10049 case OBJ_TYPE_REF:
10050 case ASSERT_EXPR:
10051 case ADDR_EXPR:
10052 case WITH_SIZE_EXPR:
10053 case SSA_NAME:
10054 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10056 case COMPOUND_EXPR:
10057 case MODIFY_EXPR:
10058 case BIND_EXPR:
10059 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10060 strict_overflow_p);
10062 case SAVE_EXPR:
10063 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10064 strict_overflow_p);
10066 case CALL_EXPR:
10068 tree fndecl = get_callee_fndecl (t);
10069 if (!fndecl) return false;
10070 if (flag_delete_null_pointer_checks && !flag_check_new
10071 && DECL_IS_OPERATOR_NEW (fndecl)
10072 && !TREE_NOTHROW (fndecl))
10073 return true;
10074 if (flag_delete_null_pointer_checks
10075 && lookup_attribute ("returns_nonnull",
10076 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10077 return true;
10078 return alloca_call_p (t);
10081 default:
10082 break;
10084 return false;
10087 /* Return true when T is an address and is known to be nonzero.
10088 Handle warnings about undefined signed overflow. */
10090 static bool
10091 tree_expr_nonzero_p (tree t)
10093 bool ret, strict_overflow_p;
10095 strict_overflow_p = false;
10096 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10097 if (strict_overflow_p)
10098 fold_overflow_warning (("assuming signed overflow does not occur when "
10099 "determining that expression is always "
10100 "non-zero"),
10101 WARN_STRICT_OVERFLOW_MISC);
10102 return ret;
10105 /* Fold a binary expression of code CODE and type TYPE with operands
10106 OP0 and OP1. LOC is the location of the resulting expression.
10107 Return the folded expression if folding is successful. Otherwise,
10108 return NULL_TREE. */
10110 tree
10111 fold_binary_loc (location_t loc,
10112 enum tree_code code, tree type, tree op0, tree op1)
10114 enum tree_code_class kind = TREE_CODE_CLASS (code);
10115 tree arg0, arg1, tem;
10116 tree t1 = NULL_TREE;
10117 bool strict_overflow_p;
10118 unsigned int prec;
10120 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10121 && TREE_CODE_LENGTH (code) == 2
10122 && op0 != NULL_TREE
10123 && op1 != NULL_TREE);
10125 arg0 = op0;
10126 arg1 = op1;
10128 /* Strip any conversions that don't change the mode. This is
10129 safe for every expression, except for a comparison expression
10130 because its signedness is derived from its operands. So, in
10131 the latter case, only strip conversions that don't change the
10132 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10133 preserved.
10135 Note that this is done as an internal manipulation within the
10136 constant folder, in order to find the simplest representation
10137 of the arguments so that their form can be studied. In any
10138 cases, the appropriate type conversions should be put back in
10139 the tree that will get out of the constant folder. */
10141 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10143 STRIP_SIGN_NOPS (arg0);
10144 STRIP_SIGN_NOPS (arg1);
10146 else
10148 STRIP_NOPS (arg0);
10149 STRIP_NOPS (arg1);
10152 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10153 constant but we can't do arithmetic on them. */
10154 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10155 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10156 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10157 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10158 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10159 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10160 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10162 if (kind == tcc_binary)
10164 /* Make sure type and arg0 have the same saturating flag. */
10165 gcc_assert (TYPE_SATURATING (type)
10166 == TYPE_SATURATING (TREE_TYPE (arg0)));
10167 tem = const_binop (code, arg0, arg1);
10169 else if (kind == tcc_comparison)
10170 tem = fold_relational_const (code, type, arg0, arg1);
10171 else
10172 tem = NULL_TREE;
10174 if (tem != NULL_TREE)
10176 if (TREE_TYPE (tem) != type)
10177 tem = fold_convert_loc (loc, type, tem);
10178 return tem;
10182 /* If this is a commutative operation, and ARG0 is a constant, move it
10183 to ARG1 to reduce the number of tests below. */
10184 if (commutative_tree_code (code)
10185 && tree_swap_operands_p (arg0, arg1, true))
10186 return fold_build2_loc (loc, code, type, op1, op0);
10188 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10190 First check for cases where an arithmetic operation is applied to a
10191 compound, conditional, or comparison operation. Push the arithmetic
10192 operation inside the compound or conditional to see if any folding
10193 can then be done. Convert comparison to conditional for this purpose.
10194 This also optimizes non-constant cases that used to be done in
10195 expand_expr.
10197 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10198 one of the operands is a comparison and the other is a comparison, a
10199 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10200 code below would make the expression more complex. Change it to a
10201 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10202 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10204 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10205 || code == EQ_EXPR || code == NE_EXPR)
10206 && TREE_CODE (type) != VECTOR_TYPE
10207 && ((truth_value_p (TREE_CODE (arg0))
10208 && (truth_value_p (TREE_CODE (arg1))
10209 || (TREE_CODE (arg1) == BIT_AND_EXPR
10210 && integer_onep (TREE_OPERAND (arg1, 1)))))
10211 || (truth_value_p (TREE_CODE (arg1))
10212 && (truth_value_p (TREE_CODE (arg0))
10213 || (TREE_CODE (arg0) == BIT_AND_EXPR
10214 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10216 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10217 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10218 : TRUTH_XOR_EXPR,
10219 boolean_type_node,
10220 fold_convert_loc (loc, boolean_type_node, arg0),
10221 fold_convert_loc (loc, boolean_type_node, arg1));
10223 if (code == EQ_EXPR)
10224 tem = invert_truthvalue_loc (loc, tem);
10226 return fold_convert_loc (loc, type, tem);
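/* Illustrative examples (added commentary): for truth-valued a < b
   and c < d,
     (a < b) & (c < d) becomes a TRUTH_AND_EXPR of the comparisons,
     (a < b) == (c < d) the inversion of a TRUTH_XOR_EXPR,
   forms the logical folders below handle better than the bitwise
   ones. */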
10229 if (TREE_CODE_CLASS (code) == tcc_binary
10230 || TREE_CODE_CLASS (code) == tcc_comparison)
10232 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10234 tem = fold_build2_loc (loc, code, type,
10235 fold_convert_loc (loc, TREE_TYPE (op0),
10236 TREE_OPERAND (arg0, 1)), op1);
10237 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10238 tem);
10240 if (TREE_CODE (arg1) == COMPOUND_EXPR
10241 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10243 tem = fold_build2_loc (loc, code, type, op0,
10244 fold_convert_loc (loc, TREE_TYPE (op1),
10245 TREE_OPERAND (arg1, 1)));
10246 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10247 tem);
10250 if (TREE_CODE (arg0) == COND_EXPR
10251 || TREE_CODE (arg0) == VEC_COND_EXPR
10252 || COMPARISON_CLASS_P (arg0))
10254 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10255 arg0, arg1,
10256 /*cond_first_p=*/1);
10257 if (tem != NULL_TREE)
10258 return tem;
10261 if (TREE_CODE (arg1) == COND_EXPR
10262 || TREE_CODE (arg1) == VEC_COND_EXPR
10263 || COMPARISON_CLASS_P (arg1))
10265 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10266 arg1, arg0,
10267 /*cond_first_p=*/0);
10268 if (tem != NULL_TREE)
10269 return tem;
10273 switch (code)
10275 case MEM_REF:
10276 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10277 if (TREE_CODE (arg0) == ADDR_EXPR
10278 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10280 tree iref = TREE_OPERAND (arg0, 0);
10281 return fold_build2 (MEM_REF, type,
10282 TREE_OPERAND (iref, 0),
10283 int_const_binop (PLUS_EXPR, arg1,
10284 TREE_OPERAND (iref, 1)));
10287 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10288 if (TREE_CODE (arg0) == ADDR_EXPR
10289 && handled_component_p (TREE_OPERAND (arg0, 0)))
10291 tree base;
10292 HOST_WIDE_INT coffset;
10293 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10294 &coffset);
10295 if (!base)
10296 return NULL_TREE;
10297 return fold_build2 (MEM_REF, type,
10298 build_fold_addr_expr (base),
10299 int_const_binop (PLUS_EXPR, arg1,
10300 size_int (coffset)));
10303 return NULL_TREE;
10305 case POINTER_PLUS_EXPR:
10306 /* 0 +p index -> (type)index */
10307 if (integer_zerop (arg0))
10308 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10310 /* PTR +p 0 -> PTR */
10311 if (integer_zerop (arg1))
10312 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10314 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10315 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10316 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10317 return fold_convert_loc (loc, type,
10318 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10319 fold_convert_loc (loc, sizetype,
10320 arg1),
10321 fold_convert_loc (loc, sizetype,
10322 arg0)));
10324 /* (PTR +p B) +p A -> PTR +p (B + A) */
10325 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10327 tree inner;
10328 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10329 tree arg00 = TREE_OPERAND (arg0, 0);
10330 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10331 arg01, fold_convert_loc (loc, sizetype, arg1));
10332 return fold_convert_loc (loc, type,
10333 fold_build_pointer_plus_loc (loc,
10334 arg00, inner));
10337 /* PTR_CST +p CST -> CST1 */
10338 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10339 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10340 fold_convert_loc (loc, type, arg1));
10342 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10343 of the array. The loop optimizer sometimes produces this type of
10344 expression. */
10345 if (TREE_CODE (arg0) == ADDR_EXPR)
10347 tem = try_move_mult_to_index (loc, arg0,
10348 fold_convert_loc (loc,
10349 ssizetype, arg1));
10350 if (tem)
10351 return fold_convert_loc (loc, type, tem);
10354 return NULL_TREE;
10356 case PLUS_EXPR:
10357 /* A + (-B) -> A - B */
10358 if (TREE_CODE (arg1) == NEGATE_EXPR
10359 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10360 return fold_build2_loc (loc, MINUS_EXPR, type,
10361 fold_convert_loc (loc, type, arg0),
10362 fold_convert_loc (loc, type,
10363 TREE_OPERAND (arg1, 0)));
10364 /* (-A) + B -> B - A */
10365 if (TREE_CODE (arg0) == NEGATE_EXPR
10366 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10367 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10368 return fold_build2_loc (loc, MINUS_EXPR, type,
10369 fold_convert_loc (loc, type, arg1),
10370 fold_convert_loc (loc, type,
10371 TREE_OPERAND (arg0, 0)));
10373 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10375 /* Convert ~A + 1 to -A. */
10376 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10377 && integer_onep (arg1))
10378 return fold_build1_loc (loc, NEGATE_EXPR, type,
10379 fold_convert_loc (loc, type,
10380 TREE_OPERAND (arg0, 0)));
10382 /* ~X + X is -1. */
10383 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10384 && !TYPE_OVERFLOW_TRAPS (type))
10386 tree tem = TREE_OPERAND (arg0, 0);
10388 STRIP_NOPS (tem);
10389 if (operand_equal_p (tem, arg1, 0))
10391 t1 = build_all_ones_cst (type);
10392 return omit_one_operand_loc (loc, type, t1, arg1);
10396 /* X + ~X is -1. */
10397 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10398 && !TYPE_OVERFLOW_TRAPS (type))
10400 tree tem = TREE_OPERAND (arg1, 0);
10402 STRIP_NOPS (tem);
10403 if (operand_equal_p (arg0, tem, 0))
10405 t1 = build_all_ones_cst (type);
10406 return omit_one_operand_loc (loc, type, t1, arg0);
10410 /* X + (X / CST) * -CST is X % CST. */
10411 if (TREE_CODE (arg1) == MULT_EXPR
10412 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10413 && operand_equal_p (arg0,
10414 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10416 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10417 tree cst1 = TREE_OPERAND (arg1, 1);
10418 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10419 cst1, cst0);
10420 if (sum && integer_zerop (sum))
10421 return fold_convert_loc (loc, type,
10422 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10423 TREE_TYPE (arg0), arg0,
10424 cst0));
10428 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10429 one. Make sure the type is not saturating and has the signedness of
10430 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10431 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10432 if ((TREE_CODE (arg0) == MULT_EXPR
10433 || TREE_CODE (arg1) == MULT_EXPR)
10434 && !TYPE_SATURATING (type)
10435 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10436 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10437 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10439 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10440 if (tem)
10441 return tem;
10444 if (! FLOAT_TYPE_P (type))
10446 if (integer_zerop (arg1))
10447 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10449 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10450 with a constant, and the two constants have no bits in common,
10451 we should treat this as a BIT_IOR_EXPR since this may produce more
10452 simplifications. */
10453 if (TREE_CODE (arg0) == BIT_AND_EXPR
10454 && TREE_CODE (arg1) == BIT_AND_EXPR
10455 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10456 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10457 && integer_zerop (const_binop (BIT_AND_EXPR,
10458 TREE_OPERAND (arg0, 1),
10459 TREE_OPERAND (arg1, 1))))
10461 code = BIT_IOR_EXPR;
10462 goto bit_ior;
10465 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10466 (plus (plus (mult) (mult)) (foo)) so that we can
10467 take advantage of the factoring cases below. */
10468 if (TYPE_OVERFLOW_WRAPS (type)
10469 && (((TREE_CODE (arg0) == PLUS_EXPR
10470 || TREE_CODE (arg0) == MINUS_EXPR)
10471 && TREE_CODE (arg1) == MULT_EXPR)
10472 || ((TREE_CODE (arg1) == PLUS_EXPR
10473 || TREE_CODE (arg1) == MINUS_EXPR)
10474 && TREE_CODE (arg0) == MULT_EXPR)))
10476 tree parg0, parg1, parg, marg;
10477 enum tree_code pcode;
10479 if (TREE_CODE (arg1) == MULT_EXPR)
10480 parg = arg0, marg = arg1;
10481 else
10482 parg = arg1, marg = arg0;
10483 pcode = TREE_CODE (parg);
10484 parg0 = TREE_OPERAND (parg, 0);
10485 parg1 = TREE_OPERAND (parg, 1);
10486 STRIP_NOPS (parg0);
10487 STRIP_NOPS (parg1);
10489 if (TREE_CODE (parg0) == MULT_EXPR
10490 && TREE_CODE (parg1) != MULT_EXPR)
10491 return fold_build2_loc (loc, pcode, type,
10492 fold_build2_loc (loc, PLUS_EXPR, type,
10493 fold_convert_loc (loc, type,
10494 parg0),
10495 fold_convert_loc (loc, type,
10496 marg)),
10497 fold_convert_loc (loc, type, parg1));
10498 if (TREE_CODE (parg0) != MULT_EXPR
10499 && TREE_CODE (parg1) == MULT_EXPR)
10500 return
10501 fold_build2_loc (loc, PLUS_EXPR, type,
10502 fold_convert_loc (loc, type, parg0),
10503 fold_build2_loc (loc, pcode, type,
10504 fold_convert_loc (loc, type, marg),
10505 fold_convert_loc (loc, type,
10506 parg1)));
10509 else
10511 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10512 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10513 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10515 /* Likewise if the operands are reversed. */
10516 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10517 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10519 /* Convert X + -C into X - C. */
10520 if (TREE_CODE (arg1) == REAL_CST
10521 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10523 tem = fold_negate_const (arg1, type);
10524 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10525 return fold_build2_loc (loc, MINUS_EXPR, type,
10526 fold_convert_loc (loc, type, arg0),
10527 fold_convert_loc (loc, type, tem));
10530 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10531 to __complex__ ( x, y ). This is not the same for SNaNs or
10532 if signed zeros are involved. */
10533 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10534 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10535 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10537 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10538 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10539 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10540 bool arg0rz = false, arg0iz = false;
10541 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10542 || (arg0i && (arg0iz = real_zerop (arg0i))))
10544 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10545 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10546 if (arg0rz && arg1i && real_zerop (arg1i))
10548 tree rp = arg1r ? arg1r
10549 : build1 (REALPART_EXPR, rtype, arg1);
10550 tree ip = arg0i ? arg0i
10551 : build1 (IMAGPART_EXPR, rtype, arg0);
10552 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10554 else if (arg0iz && arg1r && real_zerop (arg1r))
10556 tree rp = arg0r ? arg0r
10557 : build1 (REALPART_EXPR, rtype, arg0);
10558 tree ip = arg1i ? arg1i
10559 : build1 (IMAGPART_EXPR, rtype, arg1);
10560 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10565 if (flag_unsafe_math_optimizations
10566 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10567 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10568 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10569 return tem;
10571 /* Convert x+x into x*2.0. */
10572 if (operand_equal_p (arg0, arg1, 0)
10573 && SCALAR_FLOAT_TYPE_P (type))
10574 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10575 build_real (type, dconst2));
10577 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10578 We associate floats only if the user has specified
10579 -fassociative-math. */
10580 if (flag_associative_math
10581 && TREE_CODE (arg1) == PLUS_EXPR
10582 && TREE_CODE (arg0) != MULT_EXPR)
10584 tree tree10 = TREE_OPERAND (arg1, 0);
10585 tree tree11 = TREE_OPERAND (arg1, 1);
10586 if (TREE_CODE (tree11) == MULT_EXPR
10587 && TREE_CODE (tree10) == MULT_EXPR)
10589 tree tree0;
10590 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10591 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10594 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10595 We associate floats only if the user has specified
10596 -fassociative-math. */
10597 if (flag_associative_math
10598 && TREE_CODE (arg0) == PLUS_EXPR
10599 && TREE_CODE (arg1) != MULT_EXPR)
10601 tree tree00 = TREE_OPERAND (arg0, 0);
10602 tree tree01 = TREE_OPERAND (arg0, 1);
10603 if (TREE_CODE (tree01) == MULT_EXPR
10604 && TREE_CODE (tree00) == MULT_EXPR)
10606 tree tree0;
10607 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10608 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10613 bit_rotate:
10614 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10615 is a rotate of A by C1 bits. */
10616 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10617 is a rotate of A by B bits. */
10619 enum tree_code code0, code1;
10620 tree rtype;
10621 code0 = TREE_CODE (arg0);
10622 code1 = TREE_CODE (arg1);
10623 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10624 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10625 && operand_equal_p (TREE_OPERAND (arg0, 0),
10626 TREE_OPERAND (arg1, 0), 0)
10627 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10628 TYPE_UNSIGNED (rtype))
10629 /* Only create rotates in complete modes. Other cases are not
10630 expanded properly. */
10631 && (element_precision (rtype)
10632 == element_precision (TYPE_MODE (rtype))))
10634 tree tree01, tree11;
10635 enum tree_code code01, code11;
10637 tree01 = TREE_OPERAND (arg0, 1);
10638 tree11 = TREE_OPERAND (arg1, 1);
10639 STRIP_NOPS (tree01);
10640 STRIP_NOPS (tree11);
10641 code01 = TREE_CODE (tree01);
10642 code11 = TREE_CODE (tree11);
10643 if (code01 == INTEGER_CST
10644 && code11 == INTEGER_CST
10645 && TREE_INT_CST_HIGH (tree01) == 0
10646 && TREE_INT_CST_HIGH (tree11) == 0
10647 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10648 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10650 tem = build2_loc (loc, LROTATE_EXPR,
10651 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10652 TREE_OPERAND (arg0, 0),
10653 code0 == LSHIFT_EXPR ? tree01 : tree11);
10654 return fold_convert_loc (loc, type, tem);
10656 else if (code11 == MINUS_EXPR)
10658 tree tree110, tree111;
10659 tree110 = TREE_OPERAND (tree11, 0);
10660 tree111 = TREE_OPERAND (tree11, 1);
10661 STRIP_NOPS (tree110);
10662 STRIP_NOPS (tree111);
10663 if (TREE_CODE (tree110) == INTEGER_CST
10664 && 0 == compare_tree_int (tree110,
10665 element_precision
10666 (TREE_TYPE (TREE_OPERAND
10667 (arg0, 0))))
10668 && operand_equal_p (tree01, tree111, 0))
10669 return
10670 fold_convert_loc (loc, type,
10671 build2 ((code0 == LSHIFT_EXPR
10672 ? LROTATE_EXPR
10673 : RROTATE_EXPR),
10674 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10675 TREE_OPERAND (arg0, 0), tree01));
10677 else if (code01 == MINUS_EXPR)
10679 tree tree010, tree011;
10680 tree010 = TREE_OPERAND (tree01, 0);
10681 tree011 = TREE_OPERAND (tree01, 1);
10682 STRIP_NOPS (tree010);
10683 STRIP_NOPS (tree011);
10684 if (TREE_CODE (tree010) == INTEGER_CST
10685 && 0 == compare_tree_int (tree010,
10686 element_precision
10687 (TREE_TYPE (TREE_OPERAND
10688 (arg0, 0))))
10689 && operand_equal_p (tree11, tree011, 0))
10690 return fold_convert_loc
10691 (loc, type,
10692 build2 ((code0 != LSHIFT_EXPR
10693 ? LROTATE_EXPR
10694 : RROTATE_EXPR),
10695 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10696 TREE_OPERAND (arg0, 0), tree11));
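/* Illustrative sketch: assuming a 32-bit unsigned int, both shift/add
   idioms recognized above collapse to one rotate:

     unsigned int rotl3 (unsigned int a)
     { return (a << 3) + (a >> 29); }

     unsigned int rotl (unsigned int a, unsigned int b)
     { return (a << b) + (a >> (32 - b)); }

   Each body becomes a single LROTATE_EXPR of A (the second form
   assumes 0 < b < 32 at run time).  `rotl3' and `rotl' are
   hypothetical names used only for this example. */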
10701 associate:
10702 /* In most languages, we can't associate operations on floats through
10703 parentheses. Rather than remember where the parentheses were, we
10704 don't associate floats at all, unless the user has specified
10705 -fassociative-math.
10706 And, we need to make sure type is not saturating. */
10708 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10709 && !TYPE_SATURATING (type))
10711 tree var0, con0, lit0, minus_lit0;
10712 tree var1, con1, lit1, minus_lit1;
10713 tree atype = type;
10714 bool ok = true;
10716 /* Split both trees into variables, constants, and literals. Then
10717 associate each group together, the constants with literals,
10718 then the result with variables. This increases the chances of
10719 literals being recombined later and of generating relocatable
10720 expressions for the sum of a constant and literal. */
10721 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10722 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10723 code == MINUS_EXPR);
10725 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10726 if (code == MINUS_EXPR)
10727 code = PLUS_EXPR;
10729 /* With undefined overflow prefer doing association in a type
10730 which wraps on overflow, if that is one of the operand types. */
10731 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10732 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10734 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10735 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10736 atype = TREE_TYPE (arg0);
10737 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10738 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10739 atype = TREE_TYPE (arg1);
10740 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10743 /* With undefined overflow we can only associate constants with one
10744 variable, and constants whose association doesn't overflow. */
10745 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10746 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10748 if (var0 && var1)
10750 tree tmp0 = var0;
10751 tree tmp1 = var1;
10753 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10754 tmp0 = TREE_OPERAND (tmp0, 0);
10755 if (CONVERT_EXPR_P (tmp0)
10756 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10757 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10758 <= TYPE_PRECISION (atype)))
10759 tmp0 = TREE_OPERAND (tmp0, 0);
10760 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10761 tmp1 = TREE_OPERAND (tmp1, 0);
10762 if (CONVERT_EXPR_P (tmp1)
10763 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10764 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10765 <= TYPE_PRECISION (atype)))
10766 tmp1 = TREE_OPERAND (tmp1, 0);
10767 /* The only case we can still associate with two variables
10768 is if they are the same, modulo negation and bit-pattern
10769 preserving conversions. */
10770 if (!operand_equal_p (tmp0, tmp1, 0))
10771 ok = false;
10775 /* Only do something if we found more than two objects. Otherwise,
10776 nothing has changed and we risk infinite recursion. */
10777 if (ok
10778 && (2 < ((var0 != 0) + (var1 != 0)
10779 + (con0 != 0) + (con1 != 0)
10780 + (lit0 != 0) + (lit1 != 0)
10781 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10783 bool any_overflows = false;
10784 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10785 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10786 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10787 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10788 var0 = associate_trees (loc, var0, var1, code, atype);
10789 con0 = associate_trees (loc, con0, con1, code, atype);
10790 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10791 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10792 code, atype);
10794 /* Preserve the MINUS_EXPR if the negative part of the literal is
10795 greater than the positive part. Otherwise, the multiplicative
10796 folding code (i.e. extract_muldiv) may be fooled when
10797 unsigned constants are subtracted, as in the following
10798 example: ((X*2 + 4) - 8U)/2. */
10799 if (minus_lit0 && lit0)
10801 if (TREE_CODE (lit0) == INTEGER_CST
10802 && TREE_CODE (minus_lit0) == INTEGER_CST
10803 && tree_int_cst_lt (lit0, minus_lit0))
10805 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10806 MINUS_EXPR, atype);
10807 lit0 = 0;
10809 else
10811 lit0 = associate_trees (loc, lit0, minus_lit0,
10812 MINUS_EXPR, atype);
10813 minus_lit0 = 0;
10817 /* Don't introduce overflows through reassociation. */
10818 if (!any_overflows
10819 && ((lit0 && TREE_OVERFLOW_P (lit0))
10820 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10821 return NULL_TREE;
10823 if (minus_lit0)
10825 if (con0 == 0)
10826 return
10827 fold_convert_loc (loc, type,
10828 associate_trees (loc, var0, minus_lit0,
10829 MINUS_EXPR, atype));
10830 else
10832 con0 = associate_trees (loc, con0, minus_lit0,
10833 MINUS_EXPR, atype);
10834 return
10835 fold_convert_loc (loc, type,
10836 associate_trees (loc, var0, con0,
10837 PLUS_EXPR, atype));
10841 con0 = associate_trees (loc, con0, lit0, code, atype);
10842 return
10843 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10844 code, atype));
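/* Illustrative sketch: for a wrapping type the splitting above lets
   the two literals combine, e.g.

     unsigned int f (unsigned int x, unsigned int y)
     { return (x + 1u) + (y + 2u); }

   reassociates to (x + y) + 3u, leaving a single constant addition.
   `f' is a hypothetical name used only for this example. */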
10848 return NULL_TREE;
10850 case MINUS_EXPR:
10851 /* Pointer simplifications for subtraction, simple reassociations. */
10852 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10854 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10855 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10856 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10858 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10859 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10860 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10861 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10862 return fold_build2_loc (loc, PLUS_EXPR, type,
10863 fold_build2_loc (loc, MINUS_EXPR, type,
10864 arg00, arg10),
10865 fold_build2_loc (loc, MINUS_EXPR, type,
10866 arg01, arg11));
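/* Illustrative sketch: for pointers into the same object, e.g.

     #include <stddef.h>
     ptrdiff_t f (char *p, char *q, int a, int b)
     { return (p + a) - (q + b); }

   the fold above produces (p - q) + (a - b), so the pointer part and
   the offset part can simplify independently.  `f' is a hypothetical
   name used only for this example. */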
10868 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10869 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10871 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10872 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10873 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10874 fold_convert_loc (loc, type, arg1));
10875 if (tmp)
10876 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10879 /* A - (-B) -> A + B */
10880 if (TREE_CODE (arg1) == NEGATE_EXPR)
10881 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10882 fold_convert_loc (loc, type,
10883 TREE_OPERAND (arg1, 0)));
10884 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10885 if (TREE_CODE (arg0) == NEGATE_EXPR
10886 && negate_expr_p (arg1)
10887 && reorder_operands_p (arg0, arg1))
10888 return fold_build2_loc (loc, MINUS_EXPR, type,
10889 fold_convert_loc (loc, type,
10890 negate_expr (arg1)),
10891 fold_convert_loc (loc, type,
10892 TREE_OPERAND (arg0, 0)));
10893 /* Convert -A - 1 to ~A. */
10894 if (TREE_CODE (type) != COMPLEX_TYPE
10895 && TREE_CODE (arg0) == NEGATE_EXPR
10896 && integer_onep (arg1)
10897 && !TYPE_OVERFLOW_TRAPS (type))
10898 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10899 fold_convert_loc (loc, type,
10900 TREE_OPERAND (arg0, 0)));
10902 /* Convert -1 - A to ~A. */
10903 if (TREE_CODE (type) != COMPLEX_TYPE
10904 && integer_all_onesp (arg0))
10905 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10908 /* X - (X / Y) * Y is X % Y. */
10909 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10910 && TREE_CODE (arg1) == MULT_EXPR
10911 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10912 && operand_equal_p (arg0,
10913 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10914 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10915 TREE_OPERAND (arg1, 1), 0))
10916 return
10917 fold_convert_loc (loc, type,
10918 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10919 arg0, TREE_OPERAND (arg1, 1)));
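/* Illustrative sketch: the truncating-division remainder idiom folds
   directly, e.g.

     int rem (int x, int y) { return x - (x / y) * y; }

   becomes x % y, matching TRUNC_DIV_EXPR/TRUNC_MOD_EXPR semantics.
   `rem' is a hypothetical name used only for this example. */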
10921 if (! FLOAT_TYPE_P (type))
10923 if (integer_zerop (arg0))
10924 return negate_expr (fold_convert_loc (loc, type, arg1));
10925 if (integer_zerop (arg1))
10926 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10928 /* Fold A - (A & B) into ~B & A. */
10929 if (!TREE_SIDE_EFFECTS (arg0)
10930 && TREE_CODE (arg1) == BIT_AND_EXPR)
10932 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10934 tree arg10 = fold_convert_loc (loc, type,
10935 TREE_OPERAND (arg1, 0));
10936 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10937 fold_build1_loc (loc, BIT_NOT_EXPR,
10938 type, arg10),
10939 fold_convert_loc (loc, type, arg0));
10941 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10943 tree arg11 = fold_convert_loc (loc,
10944 type, TREE_OPERAND (arg1, 1));
10945 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10946 fold_build1_loc (loc, BIT_NOT_EXPR,
10947 type, arg11),
10948 fold_convert_loc (loc, type, arg0));
10952 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10953 any power of 2 minus 1. */
10954 if (TREE_CODE (arg0) == BIT_AND_EXPR
10955 && TREE_CODE (arg1) == BIT_AND_EXPR
10956 && operand_equal_p (TREE_OPERAND (arg0, 0),
10957 TREE_OPERAND (arg1, 0), 0))
10959 tree mask0 = TREE_OPERAND (arg0, 1);
10960 tree mask1 = TREE_OPERAND (arg1, 1);
10961 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10963 if (operand_equal_p (tem, mask1, 0))
10965 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10966 TREE_OPERAND (arg0, 0), mask1);
10967 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
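/* Illustrative sketch: with B = 15 (a power of 2 minus 1), e.g.

     int f (int a) { return (a & ~15) - (a & 15); }

   the two masks are complementary, so the fold above yields
   (a ^ 15) - 15: one XOR and one subtraction instead of two ANDs
   and a subtraction.  `f' is a hypothetical name used only for this
   example. */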
10972 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10973 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10974 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10976 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10977 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10978 (-ARG1 + ARG0) reduces to -ARG1. */
10979 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10980 return negate_expr (fold_convert_loc (loc, type, arg1));
10982 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10983 __complex__ ( x, -y ). This is not the same for SNaNs or if
10984 signed zeros are involved. */
10985 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10986 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10987 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10989 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10990 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10991 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10992 bool arg0rz = false, arg0iz = false;
10993 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10994 || (arg0i && (arg0iz = real_zerop (arg0i))))
10996 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10997 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10998 if (arg0rz && arg1i && real_zerop (arg1i))
11000 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11001 arg1r ? arg1r
11002 : build1 (REALPART_EXPR, rtype, arg1));
11003 tree ip = arg0i ? arg0i
11004 : build1 (IMAGPART_EXPR, rtype, arg0);
11005 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11007 else if (arg0iz && arg1r && real_zerop (arg1r))
11009 tree rp = arg0r ? arg0r
11010 : build1 (REALPART_EXPR, rtype, arg0);
11011 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11012 arg1i ? arg1i
11013 : build1 (IMAGPART_EXPR, rtype, arg1));
11014 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11019 /* Fold &x - &x. This can happen from &x.foo - &x.
11020 This is unsafe for certain floats even in non-IEEE formats.
11021 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11022 Also note that operand_equal_p is always false if an operand
11023 is volatile. */
11025 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11026 && operand_equal_p (arg0, arg1, 0))
11027 return build_zero_cst (type);
11029 /* A - B -> A + (-B) if B is easily negatable. */
11030 if (negate_expr_p (arg1)
11031 && ((FLOAT_TYPE_P (type)
11032 /* Avoid this transformation if B is a positive REAL_CST. */
11033 && (TREE_CODE (arg1) != REAL_CST
11034 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11035 || INTEGRAL_TYPE_P (type)))
11036 return fold_build2_loc (loc, PLUS_EXPR, type,
11037 fold_convert_loc (loc, type, arg0),
11038 fold_convert_loc (loc, type,
11039 negate_expr (arg1)));
11041 /* Try folding difference of addresses. */
11043 HOST_WIDE_INT diff;
11045 if ((TREE_CODE (arg0) == ADDR_EXPR
11046 || TREE_CODE (arg1) == ADDR_EXPR)
11047 && ptr_difference_const (arg0, arg1, &diff))
11048 return build_int_cst_type (type, diff);
11051 /* Fold &a[i] - &a[j] to i-j. */
11052 if (TREE_CODE (arg0) == ADDR_EXPR
11053 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11054 && TREE_CODE (arg1) == ADDR_EXPR
11055 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11057 tree tem = fold_addr_of_array_ref_difference (loc, type,
11058 TREE_OPERAND (arg0, 0),
11059 TREE_OPERAND (arg1, 0));
11060 if (tem)
11061 return tem;
11064 if (FLOAT_TYPE_P (type)
11065 && flag_unsafe_math_optimizations
11066 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11067 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11068 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11069 return tem;
11071 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11072 one. Make sure the type is not saturating and has the signedness of
11073 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11074 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11075 if ((TREE_CODE (arg0) == MULT_EXPR
11076 || TREE_CODE (arg1) == MULT_EXPR)
11077 && !TYPE_SATURATING (type)
11078 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11079 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11080 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11082 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11083 if (tem)
11084 return tem;
11087 goto associate;
11089 case MULT_EXPR:
11090 /* (-A) * (-B) -> A * B */
11091 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11092 return fold_build2_loc (loc, MULT_EXPR, type,
11093 fold_convert_loc (loc, type,
11094 TREE_OPERAND (arg0, 0)),
11095 fold_convert_loc (loc, type,
11096 negate_expr (arg1)));
11097 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11098 return fold_build2_loc (loc, MULT_EXPR, type,
11099 fold_convert_loc (loc, type,
11100 negate_expr (arg0)),
11101 fold_convert_loc (loc, type,
11102 TREE_OPERAND (arg1, 0)));
11104 if (! FLOAT_TYPE_P (type))
11106 if (integer_zerop (arg1))
11107 return omit_one_operand_loc (loc, type, arg1, arg0);
11108 if (integer_onep (arg1))
11109 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11110 /* Transform x * -1 into -x. Make sure to do the negation
11111 on the original operand with conversions not stripped
11112 because we can only strip non-sign-changing conversions. */
11113 if (integer_minus_onep (arg1))
11114 return fold_convert_loc (loc, type, negate_expr (op0));
11115 /* Transform x * -C into -x * C if x is easily negatable. */
11116 if (TREE_CODE (arg1) == INTEGER_CST
11117 && tree_int_cst_sgn (arg1) == -1
11118 && negate_expr_p (arg0)
11119 && (tem = negate_expr (arg1)) != arg1
11120 && !TREE_OVERFLOW (tem))
11121 return fold_build2_loc (loc, MULT_EXPR, type,
11122 fold_convert_loc (loc, type,
11123 negate_expr (arg0)),
11124 tem);
11126 /* (a * (1 << b)) is (a << b) */
11127 if (TREE_CODE (arg1) == LSHIFT_EXPR
11128 && integer_onep (TREE_OPERAND (arg1, 0)))
11129 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11130 TREE_OPERAND (arg1, 1));
11131 if (TREE_CODE (arg0) == LSHIFT_EXPR
11132 && integer_onep (TREE_OPERAND (arg0, 0)))
11133 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11134 TREE_OPERAND (arg0, 1));
11136 /* (A + A) * C -> A * 2 * C */
11137 if (TREE_CODE (arg0) == PLUS_EXPR
11138 && TREE_CODE (arg1) == INTEGER_CST
11139 && operand_equal_p (TREE_OPERAND (arg0, 0),
11140 TREE_OPERAND (arg0, 1), 0))
11141 return fold_build2_loc (loc, MULT_EXPR, type,
11142 omit_one_operand_loc (loc, type,
11143 TREE_OPERAND (arg0, 0),
11144 TREE_OPERAND (arg0, 1)),
11145 fold_build2_loc (loc, MULT_EXPR, type,
11146 build_int_cst (type, 2), arg1));
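/* Illustrative sketch:

     int f (int a) { return (a + a) * 3; }

   first becomes a * (2 * 3); constant folding then leaves the single
   multiplication a * 6.  `f' is a hypothetical name used only for
   this example. */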
11148 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11149 sign-changing only. */
11150 if (TREE_CODE (arg1) == INTEGER_CST
11151 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11152 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11153 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11155 strict_overflow_p = false;
11156 if (TREE_CODE (arg1) == INTEGER_CST
11157 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11158 &strict_overflow_p)))
11160 if (strict_overflow_p)
11161 fold_overflow_warning (("assuming signed overflow does not "
11162 "occur when simplifying "
11163 "multiplication"),
11164 WARN_STRICT_OVERFLOW_MISC);
11165 return fold_convert_loc (loc, type, tem);
11168 /* Optimize z * conj(z) for integer complex numbers. */
11169 if (TREE_CODE (arg0) == CONJ_EXPR
11170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11171 return fold_mult_zconjz (loc, type, arg1);
11172 if (TREE_CODE (arg1) == CONJ_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11174 return fold_mult_zconjz (loc, type, arg0);
11176 else
11178 /* Maybe fold x * 0 to 0. The expressions aren't the same
11179 when x is NaN, since x * 0 is also NaN. Nor are they the
11180 same in modes with signed zeros, since multiplying a
11181 negative value by 0 gives -0, not +0. */
11182 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11183 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11184 && real_zerop (arg1))
11185 return omit_one_operand_loc (loc, type, arg1, arg0);
11186 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11187 Likewise for complex arithmetic with signed zeros. */
11188 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11189 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11190 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11191 && real_onep (arg1))
11192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11194 /* Transform x * -1.0 into -x. */
11195 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11196 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11197 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11198 && real_minus_onep (arg1))
11199 return fold_convert_loc (loc, type, negate_expr (arg0));
11201 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11202 the result for floating-point types due to rounding, so it is applied
11203 only if -fassociative-math was specified. */
11204 if (flag_associative_math
11205 && TREE_CODE (arg0) == RDIV_EXPR
11206 && TREE_CODE (arg1) == REAL_CST
11207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11209 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11210 arg1);
11211 if (tem)
11212 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11213 TREE_OPERAND (arg0, 1));
11216 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11217 if (operand_equal_p (arg0, arg1, 0))
11219 tree tem = fold_strip_sign_ops (arg0);
11220 if (tem != NULL_TREE)
11222 tem = fold_convert_loc (loc, type, tem);
11223 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11227 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11228 This is not the same for NaNs or if signed zeros are
11229 involved. */
11230 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11231 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11232 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11233 && TREE_CODE (arg1) == COMPLEX_CST
11234 && real_zerop (TREE_REALPART (arg1)))
11236 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11237 if (real_onep (TREE_IMAGPART (arg1)))
11238 return
11239 fold_build2_loc (loc, COMPLEX_EXPR, type,
11240 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11241 rtype, arg0)),
11242 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11243 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11244 return
11245 fold_build2_loc (loc, COMPLEX_EXPR, type,
11246 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11247 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11248 rtype, arg0)));
11251 /* Optimize z * conj(z) for floating point complex numbers.
11252 Guarded by flag_unsafe_math_optimizations as non-finite
11253 imaginary components don't produce scalar results. */
11254 if (flag_unsafe_math_optimizations
11255 && TREE_CODE (arg0) == CONJ_EXPR
11256 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11257 return fold_mult_zconjz (loc, type, arg1);
11258 if (flag_unsafe_math_optimizations
11259 && TREE_CODE (arg1) == CONJ_EXPR
11260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11261 return fold_mult_zconjz (loc, type, arg0);
11263 if (flag_unsafe_math_optimizations)
11265 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11266 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11268 /* Optimizations of root(...)*root(...). */
11269 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11271 tree rootfn, arg;
11272 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11273 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11275 /* Optimize sqrt(x)*sqrt(x) as x. */
11276 if (BUILTIN_SQRT_P (fcode0)
11277 && operand_equal_p (arg00, arg10, 0)
11278 && ! HONOR_SNANS (TYPE_MODE (type)))
11279 return arg00;
11281 /* Optimize root(x)*root(y) as root(x*y). */
11282 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11283 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11284 return build_call_expr_loc (loc, rootfn, 1, arg);
11287 /* Optimize expN(x)*expN(y) as expN(x+y). */
11288 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11290 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11291 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11292 CALL_EXPR_ARG (arg0, 0),
11293 CALL_EXPR_ARG (arg1, 0));
11294 return build_call_expr_loc (loc, expfn, 1, arg);
11297 /* Optimizations of pow(...)*pow(...). */
11298 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11299 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11300 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11302 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11303 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11304 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11305 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11307 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11308 if (operand_equal_p (arg01, arg11, 0))
11310 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11311 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11312 arg00, arg10);
11313 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11316 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11317 if (operand_equal_p (arg00, arg10, 0))
11319 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11320 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11321 arg01, arg11);
11322 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
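/* Illustrative sketch: with -funsafe-math-optimizations, e.g.

     #include <math.h>
     double f (double x, double y, double z)
     { return pow (x, y) * pow (x, z); }

   becomes pow (x, y + z), and pow (x, y) * pow (z, y) likewise
   becomes pow (x * z, y).  `f' is a hypothetical name used only for
   this example. */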
11326 /* Optimize tan(x)*cos(x) as sin(x). */
11327 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11328 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11329 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11330 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11331 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11332 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11333 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11334 CALL_EXPR_ARG (arg1, 0), 0))
11336 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11338 if (sinfn != NULL_TREE)
11339 return build_call_expr_loc (loc, sinfn, 1,
11340 CALL_EXPR_ARG (arg0, 0));
11343 /* Optimize x*pow(x,c) as pow(x,c+1). */
11344 if (fcode1 == BUILT_IN_POW
11345 || fcode1 == BUILT_IN_POWF
11346 || fcode1 == BUILT_IN_POWL)
11348 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11349 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11350 if (TREE_CODE (arg11) == REAL_CST
11351 && !TREE_OVERFLOW (arg11)
11352 && operand_equal_p (arg0, arg10, 0))
11354 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11355 REAL_VALUE_TYPE c;
11356 tree arg;
11358 c = TREE_REAL_CST (arg11);
11359 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11360 arg = build_real (type, c);
11361 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11365 /* Optimize pow(x,c)*x as pow(x,c+1). */
11366 if (fcode0 == BUILT_IN_POW
11367 || fcode0 == BUILT_IN_POWF
11368 || fcode0 == BUILT_IN_POWL)
11370 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11371 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11372 if (TREE_CODE (arg01) == REAL_CST
11373 && !TREE_OVERFLOW (arg01)
11374 && operand_equal_p (arg1, arg00, 0))
11376 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11377 REAL_VALUE_TYPE c;
11378 tree arg;
11380 c = TREE_REAL_CST (arg01);
11381 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11382 arg = build_real (type, c);
11383 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11387 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11388 if (!in_gimple_form
11389 && optimize
11390 && operand_equal_p (arg0, arg1, 0))
11392 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11394 if (powfn)
11396 tree arg = build_real (type, dconst2);
11397 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11402 goto associate;
11404 case BIT_IOR_EXPR:
11405 bit_ior:
11406 if (integer_all_onesp (arg1))
11407 return omit_one_operand_loc (loc, type, arg1, arg0);
11408 if (integer_zerop (arg1))
11409 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11410 if (operand_equal_p (arg0, arg1, 0))
11411 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11413 /* ~X | X is -1. */
11414 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11415 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11417 t1 = build_zero_cst (type);
11418 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11419 return omit_one_operand_loc (loc, type, t1, arg1);
11422 /* X | ~X is -1. */
11423 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11426 t1 = build_zero_cst (type);
11427 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11428 return omit_one_operand_loc (loc, type, t1, arg0);
11431 /* Canonicalize (X & C1) | C2. */
11432 if (TREE_CODE (arg0) == BIT_AND_EXPR
11433 && TREE_CODE (arg1) == INTEGER_CST
11434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11436 double_int c1, c2, c3, msk;
11437 int width = TYPE_PRECISION (type), w;
11439 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11440 c2 = tree_to_double_int (arg1);
11442 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11443 if ((c1 & c2) == c1)
11444 return omit_one_operand_loc (loc, type, arg1,
11445 TREE_OPERAND (arg0, 0));
11447 msk = double_int::mask (width);
11449 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11450 if (msk.and_not (c1 | c2).is_zero ())
11451 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11452 TREE_OPERAND (arg0, 0), arg1);
11454 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11455 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11456 mode which allows further optimizations. */
11457 c1 &= msk;
11458 c2 &= msk;
11459 c3 = c1.and_not (c2);
11460 for (w = BITS_PER_UNIT;
11461 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11462 w <<= 1)
11464 unsigned HOST_WIDE_INT mask
11465 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11466 if (((c1.low | c2.low) & mask) == mask
11467 && (c1.low & ~mask) == 0 && c1.high == 0)
11469 c3 = double_int::from_uhwi (mask);
11470 break;
11474 if (c3 != c1)
11475 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11476 fold_build2_loc (loc, BIT_AND_EXPR, type,
11477 TREE_OPERAND (arg0, 0),
11478 double_int_to_tree (type,
11479 c3)),
11480 arg1);
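/* Illustrative sketch: with C1 = 0x0f and C2 = 0x03 neither shortcut
   above applies, so C1 is minimized to C1 & ~C2:

     int f (int x) { return (x & 0x0f) | 0x03; }

   becomes (x & 0x0c) | 0x03.  `f' is a hypothetical name used only
   for this example. */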
11483 /* (X & Y) | Y is (X, Y). */
11484 if (TREE_CODE (arg0) == BIT_AND_EXPR
11485 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11486 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11487 /* (X & Y) | X is (Y, X). */
11488 if (TREE_CODE (arg0) == BIT_AND_EXPR
11489 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11490 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11491 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11492 /* X | (X & Y) is (Y, X). */
11493 if (TREE_CODE (arg1) == BIT_AND_EXPR
11494 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11495 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11496 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11497 /* X | (Y & X) is (Y, X). */
11498 if (TREE_CODE (arg1) == BIT_AND_EXPR
11499 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11500 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11501 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11503 /* (X & ~Y) | (~X & Y) is X ^ Y */
11504 if (TREE_CODE (arg0) == BIT_AND_EXPR
11505 && TREE_CODE (arg1) == BIT_AND_EXPR)
11507 tree a0, a1, l0, l1, n0, n1;
11509 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11510 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11512 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11513 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11515 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11516 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11518 if ((operand_equal_p (n0, a0, 0)
11519 && operand_equal_p (n1, a1, 0))
11520 || (operand_equal_p (n0, a1, 0)
11521 && operand_equal_p (n1, a0, 0)))
11522 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
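/* Illustrative sketch: the classic mask-select spelling of XOR
   collapses, e.g.

     int f (int x, int y) { return (x & ~y) | (~x & y); }

   becomes x ^ y.  `f' is a hypothetical name used only for this
   example. */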
11525 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11526 if (t1 != NULL_TREE)
11527 return t1;
11529 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11531 This results in more efficient code for machines without a NAND
11532 instruction. Combine will canonicalize to the first form,
11533 which will allow use of NAND instructions provided by the
11534 backend if they exist. */
11535 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11536 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11538 return
11539 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11540 build2 (BIT_AND_EXPR, type,
11541 fold_convert_loc (loc, type,
11542 TREE_OPERAND (arg0, 0)),
11543 fold_convert_loc (loc, type,
11544 TREE_OPERAND (arg1, 0))));
11547 /* See if this can be simplified into a rotate first. If that
11548 is unsuccessful continue in the association code. */
11549 goto bit_rotate;
11551 case BIT_XOR_EXPR:
11552 if (integer_zerop (arg1))
11553 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11554 if (integer_all_onesp (arg1))
11555 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11556 if (operand_equal_p (arg0, arg1, 0))
11557 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11559 /* ~X ^ X is -1. */
11560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11561 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11563 t1 = build_zero_cst (type);
11564 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11565 return omit_one_operand_loc (loc, type, t1, arg1);
11568 /* X ^ ~X is -1. */
11569 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11570 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11572 t1 = build_zero_cst (type);
11573 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11574 return omit_one_operand_loc (loc, type, t1, arg0);
11577 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11578 with a constant, and the two constants have no bits in common,
11579 we should treat this as a BIT_IOR_EXPR since this may produce more
11580 simplifications. */
11581 if (TREE_CODE (arg0) == BIT_AND_EXPR
11582 && TREE_CODE (arg1) == BIT_AND_EXPR
11583 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11584 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11585 && integer_zerop (const_binop (BIT_AND_EXPR,
11586 TREE_OPERAND (arg0, 1),
11587 TREE_OPERAND (arg1, 1))))
11589 code = BIT_IOR_EXPR;
11590 goto bit_ior;
11593 /* (X | Y) ^ X -> Y & ~X */
11594 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11595 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11597 tree t2 = TREE_OPERAND (arg0, 1);
11598 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11599 arg1);
11600 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11601 fold_convert_loc (loc, type, t2),
11602 fold_convert_loc (loc, type, t1));
11603 return t1;
11606 /* (Y | X) ^ X -> Y & ~X */
11607 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11608 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11610 tree t2 = TREE_OPERAND (arg0, 0);
11611 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11612 arg1);
11613 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11614 fold_convert_loc (loc, type, t2),
11615 fold_convert_loc (loc, type, t1));
11616 return t1;
11619 /* X ^ (X | Y) -> Y & ~X */
11620 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11621 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11623 tree t2 = TREE_OPERAND (arg1, 1);
11624 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11625 arg0);
11626 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11627 fold_convert_loc (loc, type, t2),
11628 fold_convert_loc (loc, type, t1));
11629 return t1;
11632 /* X ^ (Y | X) -> Y & ~X */
11633 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11634 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11636 tree t2 = TREE_OPERAND (arg1, 0);
11637 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11638 arg0);
11639 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11640 fold_convert_loc (loc, type, t2),
11641 fold_convert_loc (loc, type, t1));
11642 return t1;
11645 /* Convert ~X ^ ~Y to X ^ Y. */
11646 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11647 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11648 return fold_build2_loc (loc, code, type,
11649 fold_convert_loc (loc, type,
11650 TREE_OPERAND (arg0, 0)),
11651 fold_convert_loc (loc, type,
11652 TREE_OPERAND (arg1, 0)));
11654 /* Convert ~X ^ C to X ^ ~C. */
11655 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11656 && TREE_CODE (arg1) == INTEGER_CST)
11657 return fold_build2_loc (loc, code, type,
11658 fold_convert_loc (loc, type,
11659 TREE_OPERAND (arg0, 0)),
11660 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11662 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11663 if (TREE_CODE (arg0) == BIT_AND_EXPR
11664 && integer_onep (TREE_OPERAND (arg0, 1))
11665 && integer_onep (arg1))
11666 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11667 build_zero_cst (TREE_TYPE (arg0)));
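/* Illustrative sketch:

     int f (int x) { return (x & 1) ^ 1; }

   becomes (x & 1) == 0; both forms are 1 exactly when the low bit is
   clear.  `f' is a hypothetical name used only for this example. */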
11669 /* Fold (X & Y) ^ Y as ~X & Y. */
11670 if (TREE_CODE (arg0) == BIT_AND_EXPR
11671 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11673 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11674 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11675 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11676 fold_convert_loc (loc, type, arg1));
11678 /* Fold (X & Y) ^ X as ~Y & X. */
11679 if (TREE_CODE (arg0) == BIT_AND_EXPR
11680 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11681 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11683 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11684 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11685 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11686 fold_convert_loc (loc, type, arg1));
11688 /* Fold X ^ (X & Y) as X & ~Y. */
11689 if (TREE_CODE (arg1) == BIT_AND_EXPR
11690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11692 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11693 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11694 fold_convert_loc (loc, type, arg0),
11695 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11697 /* Fold X ^ (Y & X) as ~Y & X. */
11698 if (TREE_CODE (arg1) == BIT_AND_EXPR
11699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11700 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11702 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11703 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11704 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11705 fold_convert_loc (loc, type, arg0));
11708 /* See if this can be simplified into a rotate first. If that
11709 is unsuccessful continue in the association code. */
11710 goto bit_rotate;
11712 case BIT_AND_EXPR:
11713 if (integer_all_onesp (arg1))
11714 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11715 if (integer_zerop (arg1))
11716 return omit_one_operand_loc (loc, type, arg1, arg0);
11717 if (operand_equal_p (arg0, arg1, 0))
11718 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11720 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11721 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11722 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11723 || (TREE_CODE (arg0) == EQ_EXPR
11724 && integer_zerop (TREE_OPERAND (arg0, 1))))
11725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11726 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11728 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11729 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11730 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11731 || (TREE_CODE (arg1) == EQ_EXPR
11732 && integer_zerop (TREE_OPERAND (arg1, 1))))
11733 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11734 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11736 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11737 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11738 && TREE_CODE (arg1) == INTEGER_CST
11739 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11741 tree tmp1 = fold_convert_loc (loc, type, arg1);
11742 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11743 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11744 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11745 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11746 return
11747 fold_convert_loc (loc, type,
11748 fold_build2_loc (loc, BIT_IOR_EXPR,
11749 type, tmp2, tmp3));
11752 /* (X | Y) & Y is (X, Y). */
11753 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11754 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11755 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11756 /* (X | Y) & X is (Y, X). */
11757 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11758 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11759 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11760 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11761 /* X & (X | Y) is (Y, X). */
11762 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11763 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11764 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11765 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11766 /* X & (Y | X) is (Y, X). */
11767 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11768 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11769 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11770 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11772 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11773 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11774 && integer_onep (TREE_OPERAND (arg0, 1))
11775 && integer_onep (arg1))
11777 tree tem2;
11778 tem = TREE_OPERAND (arg0, 0);
11779 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11780 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11781 tem, tem2);
11782 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11783 build_zero_cst (TREE_TYPE (tem)));
11785 /* Fold ~X & 1 as (X & 1) == 0. */
11786 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11787 && integer_onep (arg1))
11789 tree tem2;
11790 tem = TREE_OPERAND (arg0, 0);
11791 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11792 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11793 tem, tem2);
11794 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11795 build_zero_cst (TREE_TYPE (tem)));
11797 /* Fold !X & 1 as X == 0. */
11798 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11799 && integer_onep (arg1))
11801 tem = TREE_OPERAND (arg0, 0);
11802 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11803 build_zero_cst (TREE_TYPE (tem)));
11806 /* Fold (X ^ Y) & Y as ~X & Y. */
11807 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11808 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11810 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11811 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11812 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11813 fold_convert_loc (loc, type, arg1));
11815 /* Fold (X ^ Y) & X as ~Y & X. */
11816 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11817 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11818 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11820 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11821 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11822 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11823 fold_convert_loc (loc, type, arg1));
11825 /* Fold X & (X ^ Y) as X & ~Y. */
11826 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11829 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11830 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11831 fold_convert_loc (loc, type, arg0),
11832 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11834 /* Fold X & (Y ^ X) as ~Y & X. */
11835 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11837 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11839 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11840 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11841 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11842 fold_convert_loc (loc, type, arg0));
11845 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11846 multiple of 1 << CST. */
11847 if (TREE_CODE (arg1) == INTEGER_CST)
11849 double_int cst1 = tree_to_double_int (arg1);
11850 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11851 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11852 if ((cst1 & ncst1) == ncst1
11853 && multiple_of_p (type, arg0,
11854 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11855 return fold_convert_loc (loc, type, arg0);
11858 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11859 bits from CST2. */
11860 if (TREE_CODE (arg1) == INTEGER_CST
11861 && TREE_CODE (arg0) == MULT_EXPR
11862 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11864 double_int darg1 = tree_to_double_int (arg1);
11865 double_int masked
11866 = mask_with_tz (type, darg1,
11867 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11869 if (masked.is_zero ())
11870 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11871 arg0, arg1);
11872 else if (masked != darg1)
11874 /* Avoid the transform if arg1 is a mask of some
11875 mode which allows further optimizations. */
11876 int pop = darg1.popcount ();
11877 if (!(pop >= BITS_PER_UNIT
11878 && exact_log2 (pop) != -1
11879 && double_int::mask (pop) == darg1))
11880 return fold_build2_loc (loc, code, type, op0,
11881 double_int_to_tree (type, masked));
11885 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11886 ((A & N) + B) & M -> (A + B) & M
11887 Similarly if (N & M) == 0,
11888 ((A | N) + B) & M -> (A + B) & M
11889 and for - instead of + (or unary - instead of +)
11890 and/or ^ instead of |.
11891 If B is constant and (B & M) == 0, fold into A & M. */
11892 if (tree_fits_uhwi_p (arg1))
11894 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11895 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11896 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11897 && (TREE_CODE (arg0) == PLUS_EXPR
11898 || TREE_CODE (arg0) == MINUS_EXPR
11899 || TREE_CODE (arg0) == NEGATE_EXPR)
11900 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11901 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11903 tree pmop[2];
11904 int which = 0;
11905 unsigned HOST_WIDE_INT cst0;
11907 /* Now we know that arg0 is (C + D) or (C - D) or
11908 -C and arg1 (M) is == (1LL << cst) - 1.
11909 Store C into PMOP[0] and D into PMOP[1]. */
11910 pmop[0] = TREE_OPERAND (arg0, 0);
11911 pmop[1] = NULL;
11912 if (TREE_CODE (arg0) != NEGATE_EXPR)
11914 pmop[1] = TREE_OPERAND (arg0, 1);
11915 which = 1;
11918 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11919 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11920 & cst1) != cst1)
11921 which = -1;
11923 for (; which >= 0; which--)
11924 switch (TREE_CODE (pmop[which]))
11926 case BIT_AND_EXPR:
11927 case BIT_IOR_EXPR:
11928 case BIT_XOR_EXPR:
11929 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11930 != INTEGER_CST)
11931 break;
11932 /* tree_to_[su]hwi not used, because we don't care about
11933 the upper bits. */
11934 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11935 cst0 &= cst1;
11936 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11938 if (cst0 != cst1)
11939 break;
11941 else if (cst0 != 0)
11942 break;
11943 /* If C or D is of the form (A & N) where
11944 (N & M) == M, or of the form (A | N) or
11945 (A ^ N) where (N & M) == 0, replace it with A. */
11946 pmop[which] = TREE_OPERAND (pmop[which], 0);
11947 break;
11948 case INTEGER_CST:
11949 /* If C or D is a constant N where (N & M) == 0, it can be
11950 omitted (assumed 0). */
11951 if ((TREE_CODE (arg0) == PLUS_EXPR
11952 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11953 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11954 pmop[which] = NULL;
11955 break;
11956 default:
11957 break;
11960 /* Only build anything new if we optimized one or both arguments
11961 above. */
11962 if (pmop[0] != TREE_OPERAND (arg0, 0)
11963 || (TREE_CODE (arg0) != NEGATE_EXPR
11964 && pmop[1] != TREE_OPERAND (arg0, 1)))
11966 tree utype = TREE_TYPE (arg0);
11967 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11969 /* Perform the operations in a type that has defined
11970 overflow behavior. */
11971 utype = unsigned_type_for (TREE_TYPE (arg0));
11972 if (pmop[0] != NULL)
11973 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11974 if (pmop[1] != NULL)
11975 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11978 if (TREE_CODE (arg0) == NEGATE_EXPR)
11979 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11980 else if (TREE_CODE (arg0) == PLUS_EXPR)
11982 if (pmop[0] != NULL && pmop[1] != NULL)
11983 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11984 pmop[0], pmop[1]);
11985 else if (pmop[0] != NULL)
11986 tem = pmop[0];
11987 else if (pmop[1] != NULL)
11988 tem = pmop[1];
11989 else
11990 return build_int_cst (type, 0);
11992 else if (pmop[0] == NULL)
11993 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11994 else
11995 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11996 pmop[0], pmop[1]);
11997 /* TEM is now the new binary +, - or unary - replacement. */
11998 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11999 fold_convert_loc (loc, utype, arg1));
12000 return fold_convert_loc (loc, type, tem);
12005 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12006 if (t1 != NULL_TREE)
12007 return t1;
12008 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12009 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12010 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12012 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12014 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12015 && (~TREE_INT_CST_LOW (arg1)
12016 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12017 return
12018 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12021 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12023 This results in more efficient code for machines without a NOR
12024 instruction. Combine will canonicalize to the first form,
12025 which will allow use of NOR instructions provided by the
12026 backend if they exist. */
12027 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12028 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12030 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12031 build2 (BIT_IOR_EXPR, type,
12032 fold_convert_loc (loc, type,
12033 TREE_OPERAND (arg0, 0)),
12034 fold_convert_loc (loc, type,
12035 TREE_OPERAND (arg1, 0))));
12038 /* If arg0 is derived from the address of an object or function, we may
12039 be able to fold this expression using the object or function's
12040 alignment. */
12041 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12043 unsigned HOST_WIDE_INT modulus, residue;
12044 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12046 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12047 integer_onep (arg1));
12049 /* This works because modulus is a power of 2. If this weren't the
12050 case, we'd have to replace it by its greatest power-of-2
12051 divisor: modulus & -modulus. */
12052 if (low < modulus)
12053 return build_int_cst (type, residue & low);
12056 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12057 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12058 if the new mask might be further optimized. */
12059 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12060 || TREE_CODE (arg0) == RSHIFT_EXPR)
12061 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12062 && TREE_CODE (arg1) == INTEGER_CST
12063 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12064 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12065 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12066 < TYPE_PRECISION (TREE_TYPE (arg0))))
12068 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12069 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12070 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12071 tree shift_type = TREE_TYPE (arg0);
12073 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12074 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12075 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12076 && TYPE_PRECISION (TREE_TYPE (arg0))
12077 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12079 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12080 tree arg00 = TREE_OPERAND (arg0, 0);
12081 /* See if more bits can be proven as zero because of
12082 zero extension. */
12083 if (TREE_CODE (arg00) == NOP_EXPR
12084 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12086 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12087 if (TYPE_PRECISION (inner_type)
12088 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12089 && TYPE_PRECISION (inner_type) < prec)
12091 prec = TYPE_PRECISION (inner_type);
12092 /* See if we can shorten the right shift. */
12093 if (shiftc < prec)
12094 shift_type = inner_type;
12095 /* Otherwise X >> C1 is all zeros, so we'll optimize
12096 it into (X, 0) later on by making sure zerobits
12097 is all ones. */
12100 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12101 if (shiftc < prec)
12103 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12104 zerobits <<= prec - shiftc;
12106 /* For an arithmetic shift, if the sign bit could be set, zerobits
12107 may actually contain sign bits, so no transformation is
12108 possible, unless MASK masks them all away. In that
12109 case the shift needs to be converted into a logical shift. */
12110 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12111 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12113 if ((mask & zerobits) == 0)
12114 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12115 else
12116 zerobits = 0;
12120 /* ((X << 16) & 0xff00) is (X, 0). */
12121 if ((mask & zerobits) == mask)
12122 return omit_one_operand_loc (loc, type,
12123 build_int_cst (type, 0), arg0);
12125 newmask = mask | zerobits;
12126 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12128 /* Only do the transformation if NEWMASK is some integer
12129 mode's mask. */
12130 for (prec = BITS_PER_UNIT;
12131 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12132 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12133 break;
12134 if (prec < HOST_BITS_PER_WIDE_INT
12135 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12137 tree newmaskt;
12139 if (shift_type != TREE_TYPE (arg0))
12141 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12142 fold_convert_loc (loc, shift_type,
12143 TREE_OPERAND (arg0, 0)),
12144 TREE_OPERAND (arg0, 1));
12145 tem = fold_convert_loc (loc, type, tem);
12147 else
12148 tem = op0;
12149 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12150 if (!tree_int_cst_equal (newmaskt, arg1))
12151 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
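/* Illustrative sketch: with a 32-bit unsigned int, e.g.

     unsigned int f (unsigned int x) { return (x >> 24) & 0xff; }

   the zerobits analysis proves bits 8..31 of x >> 24 are zero, so the
   mask widens to 0xffffffff and the redundant AND folds away, leaving
   x >> 24; and (x << 16) & 0xff00 folds to 0 outright.  `f' is a
   hypothetical name used only for this example. */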
12156 goto associate;
12158 case RDIV_EXPR:
12159 /* Don't touch a floating-point divide by zero unless the mode
12160 of the constant can represent infinity. */
12161 if (TREE_CODE (arg1) == REAL_CST
12162 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12163 && real_zerop (arg1))
12164 return NULL_TREE;
12166 /* Optimize A / A to 1.0 if we don't care about
12167 NaNs or Infinities. Skip the transformation
12168 for non-real operands. */
12169 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12170 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12171 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12172 && operand_equal_p (arg0, arg1, 0))
12174 tree r = build_real (TREE_TYPE (arg0), dconst1);
12176 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12179 /* The complex version of the above A / A optimization. */
12180 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12181 && operand_equal_p (arg0, arg1, 0))
12183 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12184 if (! HONOR_NANS (TYPE_MODE (elem_type))
12185 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12187 tree r = build_real (elem_type, dconst1);
12188 /* omit_two_operands will call fold_convert for us. */
12189 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12193 /* (-A) / (-B) -> A / B */
12194 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12195 return fold_build2_loc (loc, RDIV_EXPR, type,
12196 TREE_OPERAND (arg0, 0),
12197 negate_expr (arg1));
12198 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12199 return fold_build2_loc (loc, RDIV_EXPR, type,
12200 negate_expr (arg0),
12201 TREE_OPERAND (arg1, 0));
12203 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
12204 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12205 && real_onep (arg1))
12206 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12208 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
12209 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12210 && real_minus_onep (arg1))
12211 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12212 negate_expr (arg0)));
12214 /* If ARG1 is a constant, we can convert this to a multiply by the
12215 reciprocal. This does not have the same rounding properties,
12216 so only do this if -freciprocal-math. We can actually
12217 always safely do it if ARG1 is a power of two, but it's hard to
12218 tell if it is or not in a portable manner. */
12219 if (optimize
12220 && (TREE_CODE (arg1) == REAL_CST
12221 || (TREE_CODE (arg1) == COMPLEX_CST
12222 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12223 || (TREE_CODE (arg1) == VECTOR_CST
12224 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12226 if (flag_reciprocal_math
12227 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12228 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12229 /* Find the reciprocal if optimizing and the result is exact.
12230 TODO: Complex reciprocal not implemented. */
12231 if (TREE_CODE (arg1) != COMPLEX_CST)
12233 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12235 if (inverse)
12236 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
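/* Illustrative sketch: with -freciprocal-math, e.g.

     double f (double x) { return x / 3.0; }

   becomes x * (1.0 / 3.0); a division by a constant with an exactly
   representable reciprocal, such as 2.0, becomes x * 0.5 whenever
   optimizing, since no rounding is lost.  `f' is a hypothetical name
   used only for this example. */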
12239 /* Convert A/B/C to A/(B*C). */
12240 if (flag_reciprocal_math
12241 && TREE_CODE (arg0) == RDIV_EXPR)
12242 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12243 fold_build2_loc (loc, MULT_EXPR, type,
12244 TREE_OPERAND (arg0, 1), arg1));
12246 /* Convert A/(B/C) to (A/B)*C. */
12247 if (flag_reciprocal_math
12248 && TREE_CODE (arg1) == RDIV_EXPR)
12249 return fold_build2_loc (loc, MULT_EXPR, type,
12250 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12251 TREE_OPERAND (arg1, 0)),
12252 TREE_OPERAND (arg1, 1));
12254 /* Convert C1/(X*C2) into (C1/C2)/X. */
12255 if (flag_reciprocal_math
12256 && TREE_CODE (arg1) == MULT_EXPR
12257 && TREE_CODE (arg0) == REAL_CST
12258 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12260 tree tem = const_binop (RDIV_EXPR, arg0,
12261 TREE_OPERAND (arg1, 1));
12262 if (tem)
12263 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12264 TREE_OPERAND (arg1, 0));
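/* E.g. under -freciprocal-math, "6.0 / (x * 3.0)" folds to "2.0 / x",
   replacing a multiply and a divide with a single divide.  */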
12267 if (flag_unsafe_math_optimizations)
12269 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12270 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12272 /* Optimize sin(x)/cos(x) as tan(x). */
12273 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12274 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12275 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12276 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12277 CALL_EXPR_ARG (arg1, 0), 0))
12279 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12281 if (tanfn != NULL_TREE)
12282 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12285 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12286 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12287 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12288 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12289 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12290 CALL_EXPR_ARG (arg1, 0), 0))
12292 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12294 if (tanfn != NULL_TREE)
12296 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12297 CALL_EXPR_ARG (arg0, 0));
12298 return fold_build2_loc (loc, RDIV_EXPR, type,
12299 build_real (type, dconst1), tmp);
12303 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12304 NaNs or Infinities. */
12305 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12306 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12307 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12309 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12310 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12312 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12313 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12314 && operand_equal_p (arg00, arg01, 0))
12316 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12318 if (cosfn != NULL_TREE)
12319 return build_call_expr_loc (loc, cosfn, 1, arg00);
12323 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12324 NaNs or Infinities. */
12325 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12326 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12327 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12329 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12330 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12332 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12333 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12334 && operand_equal_p (arg00, arg01, 0))
12336 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12338 if (cosfn != NULL_TREE)
12340 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12341 return fold_build2_loc (loc, RDIV_EXPR, type,
12342 build_real (type, dconst1),
12343 tmp);
12348 /* Optimize pow(x,c)/x as pow(x,c-1). */
12349 if (fcode0 == BUILT_IN_POW
12350 || fcode0 == BUILT_IN_POWF
12351 || fcode0 == BUILT_IN_POWL)
12353 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12354 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12355 if (TREE_CODE (arg01) == REAL_CST
12356 && !TREE_OVERFLOW (arg01)
12357 && operand_equal_p (arg1, arg00, 0))
12359 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12360 REAL_VALUE_TYPE c;
12361 tree arg;
12363 c = TREE_REAL_CST (arg01);
12364 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12365 arg = build_real (type, c);
12366 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
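/* E.g. "pow (x, 3.5) / x" becomes "pow (x, 2.5)" here.  */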
12370 /* Optimize a/root(b/c) into a*root(c/b). */
12371 if (BUILTIN_ROOT_P (fcode1))
12373 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12375 if (TREE_CODE (rootarg) == RDIV_EXPR)
12377 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12378 tree b = TREE_OPERAND (rootarg, 0);
12379 tree c = TREE_OPERAND (rootarg, 1);
12381 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12383 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12384 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12388 /* Optimize x/expN(y) into x*expN(-y). */
12389 if (BUILTIN_EXPONENT_P (fcode1))
12391 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12392 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12393 arg1 = build_call_expr_loc (loc,
12394 expfn, 1,
12395 fold_convert_loc (loc, type, arg));
12396 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12399 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12400 if (fcode1 == BUILT_IN_POW
12401 || fcode1 == BUILT_IN_POWF
12402 || fcode1 == BUILT_IN_POWL)
12404 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12405 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12406 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12407 tree neg11 = fold_convert_loc (loc, type,
12408 negate_expr (arg11));
12409 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12410 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12413 return NULL_TREE;
12415 case TRUNC_DIV_EXPR:
12416 /* Optimize (X & (-A)) / A where A is a power of 2,
12417 to X >> log2(A). */
12418 if (TREE_CODE (arg0) == BIT_AND_EXPR
12419 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12420 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12422 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12423 arg1, TREE_OPERAND (arg0, 1));
12424 if (sum && integer_zerop (sum)) {
12425 unsigned long pow2;
12427 if (TREE_INT_CST_LOW (arg1))
12428 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12429 else
12430 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12431 + HOST_BITS_PER_WIDE_INT;
12433 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12434 TREE_OPERAND (arg0, 0),
12435 build_int_cst (integer_type_node, pow2));
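/* E.g. for signed x, "(x & -8) / 8" becomes "x >> 3": the mask
   guarantees the low bits are zero, so the truncating division is an
   exact arithmetic shift.  */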
12439 /* Fall through */
12441 case FLOOR_DIV_EXPR:
12442 /* Simplify A / (B << N) where A and B are positive and B is
12443 a power of 2, to A >> (N + log2(B)). */
12444 strict_overflow_p = false;
12445 if (TREE_CODE (arg1) == LSHIFT_EXPR
12446 && (TYPE_UNSIGNED (type)
12447 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12449 tree sval = TREE_OPERAND (arg1, 0);
12450 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12452 tree sh_cnt = TREE_OPERAND (arg1, 1);
12453 unsigned long pow2;
12455 if (TREE_INT_CST_LOW (sval))
12456 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12457 else
12458 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12459 + HOST_BITS_PER_WIDE_INT;
12461 if (strict_overflow_p)
12462 fold_overflow_warning (("assuming signed overflow does not "
12463 "occur when simplifying A / (B << N)"),
12464 WARN_STRICT_OVERFLOW_MISC);
12466 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12467 sh_cnt,
12468 build_int_cst (TREE_TYPE (sh_cnt),
12469 pow2));
12470 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12471 fold_convert_loc (loc, type, arg0), sh_cnt);
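/* E.g. for unsigned (or provably nonnegative) x, "x / (4 << n)"
   becomes "x >> (n + 2)".  */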
12475 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12476 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12477 if (INTEGRAL_TYPE_P (type)
12478 && TYPE_UNSIGNED (type)
12479 && code == FLOOR_DIV_EXPR)
12480 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12482 /* Fall through */
12484 case ROUND_DIV_EXPR:
12485 case CEIL_DIV_EXPR:
12486 case EXACT_DIV_EXPR:
12487 if (integer_onep (arg1))
12488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12489 if (integer_zerop (arg1))
12490 return NULL_TREE;
12491 /* X / -1 is -X. */
12492 if (!TYPE_UNSIGNED (type)
12493 && TREE_CODE (arg1) == INTEGER_CST
12494 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12495 && TREE_INT_CST_HIGH (arg1) == -1)
12496 return fold_convert_loc (loc, type, negate_expr (arg0));
12498 /* Convert -A / -B to A / B when the type is signed and overflow is
12499 undefined. */
12500 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12501 && TREE_CODE (arg0) == NEGATE_EXPR
12502 && negate_expr_p (arg1))
12504 if (INTEGRAL_TYPE_P (type))
12505 fold_overflow_warning (("assuming signed overflow does not occur "
12506 "when distributing negation across "
12507 "division"),
12508 WARN_STRICT_OVERFLOW_MISC);
12509 return fold_build2_loc (loc, code, type,
12510 fold_convert_loc (loc, type,
12511 TREE_OPERAND (arg0, 0)),
12512 fold_convert_loc (loc, type,
12513 negate_expr (arg1)));
12515 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12516 && TREE_CODE (arg1) == NEGATE_EXPR
12517 && negate_expr_p (arg0))
12519 if (INTEGRAL_TYPE_P (type))
12520 fold_overflow_warning (("assuming signed overflow does not occur "
12521 "when distributing negation across "
12522 "division"),
12523 WARN_STRICT_OVERFLOW_MISC);
12524 return fold_build2_loc (loc, code, type,
12525 fold_convert_loc (loc, type,
12526 negate_expr (arg0)),
12527 fold_convert_loc (loc, type,
12528 TREE_OPERAND (arg1, 0)));
12531 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12532 operation, EXACT_DIV_EXPR.
12534 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12535 At one time others generated faster code, but it's not clear they do
12536 after the last round of changes to the DIV code in expmed.c. */
12537 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12538 && multiple_of_p (type, arg0, arg1))
12539 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12541 strict_overflow_p = false;
12542 if (TREE_CODE (arg1) == INTEGER_CST
12543 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12544 &strict_overflow_p)))
12546 if (strict_overflow_p)
12547 fold_overflow_warning (("assuming signed overflow does not occur "
12548 "when simplifying division"),
12549 WARN_STRICT_OVERFLOW_MISC);
12550 return fold_convert_loc (loc, type, tem);
12553 return NULL_TREE;
12555 case CEIL_MOD_EXPR:
12556 case FLOOR_MOD_EXPR:
12557 case ROUND_MOD_EXPR:
12558 case TRUNC_MOD_EXPR:
12559 /* X % 1 is always zero, but be sure to preserve any side
12560 effects in X. */
12561 if (integer_onep (arg1))
12562 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12564 /* For X % 0, return X % 0 unchanged so that we can get the
12565 proper warnings and errors. */
12566 if (integer_zerop (arg1))
12567 return NULL_TREE;
12569 /* 0 % X is always zero, but be sure to preserve any side
12570 effects in X. Place this after checking for X == 0. */
12571 if (integer_zerop (arg0))
12572 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12574 /* X % -1 is zero. */
12575 if (!TYPE_UNSIGNED (type)
12576 && TREE_CODE (arg1) == INTEGER_CST
12577 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12578 && TREE_INT_CST_HIGH (arg1) == -1)
12579 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12581 /* X % -C is the same as X % C. */
12582 if (code == TRUNC_MOD_EXPR
12583 && !TYPE_UNSIGNED (type)
12584 && TREE_CODE (arg1) == INTEGER_CST
12585 && !TREE_OVERFLOW (arg1)
12586 && TREE_INT_CST_HIGH (arg1) < 0
12587 && !TYPE_OVERFLOW_TRAPS (type)
12588 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12589 && !sign_bit_p (arg1, arg1))
12590 return fold_build2_loc (loc, code, type,
12591 fold_convert_loc (loc, type, arg0),
12592 fold_convert_loc (loc, type,
12593 negate_expr (arg1)));
12595 /* X % -Y is the same as X % Y. */
12596 if (code == TRUNC_MOD_EXPR
12597 && !TYPE_UNSIGNED (type)
12598 && TREE_CODE (arg1) == NEGATE_EXPR
12599 && !TYPE_OVERFLOW_TRAPS (type))
12600 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12601 fold_convert_loc (loc, type,
12602 TREE_OPERAND (arg1, 0)));
12604 strict_overflow_p = false;
12605 if (TREE_CODE (arg1) == INTEGER_CST
12606 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12607 &strict_overflow_p)))
12609 if (strict_overflow_p)
12610 fold_overflow_warning (("assuming signed overflow does not occur "
12611 "when simplifying modulus"),
12612 WARN_STRICT_OVERFLOW_MISC);
12613 return fold_convert_loc (loc, type, tem);
12616 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12617 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12618 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12619 && (TYPE_UNSIGNED (type)
12620 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12622 tree c = arg1;
12623 /* Also optimize A % (C << N) where C is a power of 2,
12624 to A & ((C << N) - 1). */
12625 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12626 c = TREE_OPERAND (arg1, 0);
12628 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12630 tree mask
12631 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12632 build_int_cst (TREE_TYPE (arg1), 1));
12633 if (strict_overflow_p)
12634 fold_overflow_warning (("assuming signed overflow does not "
12635 "occur when simplifying "
12636 "X % (power of two)"),
12637 WARN_STRICT_OVERFLOW_MISC);
12638 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12639 fold_convert_loc (loc, type, arg0),
12640 fold_convert_loc (loc, type, mask));
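/* E.g. for unsigned x, "x % 16" becomes "x & 15", and "x % (2 << n)"
   becomes "x & ((2 << n) - 1)".  */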
12644 return NULL_TREE;
12646 case LROTATE_EXPR:
12647 case RROTATE_EXPR:
12648 if (integer_all_onesp (arg0))
12649 return omit_one_operand_loc (loc, type, arg0, arg1);
12650 goto shift;
12652 case RSHIFT_EXPR:
12653 /* Optimize -1 >> x for arithmetic right shifts. */
12654 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12655 && tree_expr_nonnegative_p (arg1))
12656 return omit_one_operand_loc (loc, type, arg0, arg1);
12657 /* ... fall through ... */
12659 case LSHIFT_EXPR:
12660 shift:
12661 if (integer_zerop (arg1))
12662 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12663 if (integer_zerop (arg0))
12664 return omit_one_operand_loc (loc, type, arg0, arg1);
12666 /* Prefer vector1 << scalar to vector1 << vector2
12667 if vector2 is uniform. */
12668 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12669 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12670 return fold_build2_loc (loc, code, type, op0, tem);
12672 /* Since negative shift count is not well-defined,
12673 don't try to compute it in the compiler. */
12674 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12675 return NULL_TREE;
12677 prec = element_precision (type);
12679 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12680 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12681 && tree_to_uhwi (arg1) < prec
12682 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12683 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12685 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12686 + tree_to_uhwi (arg1));
12688 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12689 being well defined. */
12690 if (low >= prec)
12692 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12693 low = low % prec;
12694 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12695 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12696 TREE_OPERAND (arg0, 0));
12697 else
12698 low = prec - 1;
12701 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12702 build_int_cst (TREE_TYPE (arg1), low));
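/* E.g. "(x << 3) << 5" becomes "x << 8".  If the combined count
   reaches the precision, rotates wrap the count, logical shifts fold
   to zero, and arithmetic right shifts clamp to precision - 1.  */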
12705 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12706 into x & ((unsigned)-1 >> c) for unsigned types. */
12707 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12708 || (TYPE_UNSIGNED (type)
12709 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12710 && tree_fits_uhwi_p (arg1)
12711 && tree_to_uhwi (arg1) < prec
12712 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12713 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12715 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12716 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12717 tree lshift;
12718 tree arg00;
12720 if (low0 == low1)
12722 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12724 lshift = build_minus_one_cst (type);
12725 lshift = const_binop (code, lshift, arg1);
12727 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
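/* E.g. "(x >> 4) << 4" becomes "x & -16", and for 32-bit unsigned x,
   "(x << 4) >> 4" becomes "x & (~0U >> 4)".  */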
12731 /* Rewrite an LROTATE_EXPR by a constant into an
12732 RROTATE_EXPR by a new constant. */
12733 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12735 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12736 tem = const_binop (MINUS_EXPR, tem, arg1);
12737 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
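/* E.g. a left rotate of a 32-bit value by 8 is canonicalized as a
   right rotate by 24, so later matching only has to consider one
   rotate direction.  */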
12740 /* If we have a rotate of a bit operation with the rotate count and
12741 the second operand of the bit operation both constant,
12742 permute the two operations. */
12743 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12744 && (TREE_CODE (arg0) == BIT_AND_EXPR
12745 || TREE_CODE (arg0) == BIT_IOR_EXPR
12746 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12748 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12749 fold_build2_loc (loc, code, type,
12750 TREE_OPERAND (arg0, 0), arg1),
12751 fold_build2_loc (loc, code, type,
12752 TREE_OPERAND (arg0, 1), arg1));
12754 /* Two consecutive rotates adding up to the precision of the
12755 type can be ignored. */
12756 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12757 && TREE_CODE (arg0) == RROTATE_EXPR
12758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12759 && TREE_INT_CST_HIGH (arg1) == 0
12760 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12761 && ((TREE_INT_CST_LOW (arg1)
12762 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12763 == prec))
12764 return TREE_OPERAND (arg0, 0);
12766 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12767 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12768 if the latter can be further optimized. */
12769 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12770 && TREE_CODE (arg0) == BIT_AND_EXPR
12771 && TREE_CODE (arg1) == INTEGER_CST
12772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12774 tree mask = fold_build2_loc (loc, code, type,
12775 fold_convert_loc (loc, type,
12776 TREE_OPERAND (arg0, 1)),
12777 arg1);
12778 tree shift = fold_build2_loc (loc, code, type,
12779 fold_convert_loc (loc, type,
12780 TREE_OPERAND (arg0, 0)),
12781 arg1);
12782 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12783 if (tem)
12784 return tem;
12787 return NULL_TREE;
12789 case MIN_EXPR:
12790 if (operand_equal_p (arg0, arg1, 0))
12791 return omit_one_operand_loc (loc, type, arg0, arg1);
12792 if (INTEGRAL_TYPE_P (type)
12793 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12794 return omit_one_operand_loc (loc, type, arg1, arg0);
12795 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12796 if (tem)
12797 return tem;
12798 goto associate;
12800 case MAX_EXPR:
12801 if (operand_equal_p (arg0, arg1, 0))
12802 return omit_one_operand_loc (loc, type, arg0, arg1);
12803 if (INTEGRAL_TYPE_P (type)
12804 && TYPE_MAX_VALUE (type)
12805 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12806 return omit_one_operand_loc (loc, type, arg1, arg0);
12807 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12808 if (tem)
12809 return tem;
12810 goto associate;
12812 case TRUTH_ANDIF_EXPR:
12813 /* Note that the operands of this must be ints
12814 and their values must be 0 or 1.
12815 ("true" is a fixed value perhaps depending on the language.) */
12816 /* If first arg is constant zero, return it. */
12817 if (integer_zerop (arg0))
12818 return fold_convert_loc (loc, type, arg0);
12819 case TRUTH_AND_EXPR:
12820 /* If either arg is constant true, drop it. */
12821 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12822 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12823 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12824 /* Preserve sequence points. */
12825 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12826 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12827 /* If second arg is constant zero, result is zero, but first arg
12828 must be evaluated. */
12829 if (integer_zerop (arg1))
12830 return omit_one_operand_loc (loc, type, arg1, arg0);
12831 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12832 case will be handled here. */
12833 if (integer_zerop (arg0))
12834 return omit_one_operand_loc (loc, type, arg0, arg1);
12836 /* !X && X is always false. */
12837 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12839 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12840 /* X && !X is always false. */
12841 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12842 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12843 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12845 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12846 means A >= Y && A != MAX, but in this case we know that
12847 A < X <= MAX. */
12849 if (!TREE_SIDE_EFFECTS (arg0)
12850 && !TREE_SIDE_EFFECTS (arg1))
12852 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12853 if (tem && !operand_equal_p (tem, arg0, 0))
12854 return fold_build2_loc (loc, code, type, tem, arg1);
12856 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12857 if (tem && !operand_equal_p (tem, arg1, 0))
12858 return fold_build2_loc (loc, code, type, arg0, tem);
12861 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12862 != NULL_TREE)
12863 return tem;
12865 return NULL_TREE;
12867 case TRUTH_ORIF_EXPR:
12868 /* Note that the operands of this must be ints
12869 and their values must be 0 or true.
12870 ("true" is a fixed value perhaps depending on the language.) */
12871 /* If first arg is constant true, return it. */
12872 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12873 return fold_convert_loc (loc, type, arg0);
12874 case TRUTH_OR_EXPR:
12875 /* If either arg is constant zero, drop it. */
12876 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12877 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12878 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12879 /* Preserve sequence points. */
12880 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12881 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12882 /* If second arg is constant true, result is true, but we must
12883 evaluate first arg. */
12884 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12885 return omit_one_operand_loc (loc, type, arg1, arg0);
12886 /* Likewise for first arg, but note this only occurs here for
12887 TRUTH_OR_EXPR. */
12888 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12889 return omit_one_operand_loc (loc, type, arg0, arg1);
12891 /* !X || X is always true. */
12892 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12893 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12894 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12895 /* X || !X is always true. */
12896 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12897 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12898 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12900 /* (X && !Y) || (!X && Y) is X ^ Y */
12901 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12902 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12904 tree a0, a1, l0, l1, n0, n1;
12906 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12907 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12909 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12910 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12912 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12913 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12915 if ((operand_equal_p (n0, a0, 0)
12916 && operand_equal_p (n1, a1, 0))
12917 || (operand_equal_p (n0, a1, 0)
12918 && operand_equal_p (n1, a0, 0)))
12919 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
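/* E.g. "(a && !b) || (!a && b)" folds to the single test "a ^ b".  */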
12922 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12923 != NULL_TREE)
12924 return tem;
12926 return NULL_TREE;
12928 case TRUTH_XOR_EXPR:
12929 /* If the second arg is constant zero, drop it. */
12930 if (integer_zerop (arg1))
12931 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12932 /* If the second arg is constant true, this is a logical inversion. */
12933 if (integer_onep (arg1))
12935 tem = invert_truthvalue_loc (loc, arg0);
12936 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12938 /* Identical arguments cancel to zero. */
12939 if (operand_equal_p (arg0, arg1, 0))
12940 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12942 /* !X ^ X is always true. */
12943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12945 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12947 /* X ^ !X is always true. */
12948 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12950 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12952 return NULL_TREE;
12954 case EQ_EXPR:
12955 case NE_EXPR:
12956 STRIP_NOPS (arg0);
12957 STRIP_NOPS (arg1);
12959 tem = fold_comparison (loc, code, type, op0, op1);
12960 if (tem != NULL_TREE)
12961 return tem;
12963 /* bool_var != 0 becomes bool_var. */
12964 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12965 && code == NE_EXPR)
12966 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12968 /* bool_var == 1 becomes bool_var. */
12969 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12970 && code == EQ_EXPR)
12971 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12973 /* bool_var != 1 becomes !bool_var. */
12974 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12975 && code == NE_EXPR)
12976 return fold_convert_loc (loc, type,
12977 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12978 TREE_TYPE (arg0), arg0));
12980 /* bool_var == 0 becomes !bool_var. */
12981 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12982 && code == EQ_EXPR)
12983 return fold_convert_loc (loc, type,
12984 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12985 TREE_TYPE (arg0), arg0));
12987 /* !exp != 0 becomes !exp */
12988 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12989 && code == NE_EXPR)
12990 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12992 /* If this is an equality comparison of the address of two non-weak,
12993 unaliased symbols neither of which are extern (since we do not
12994 have access to attributes for externs), then we know the result. */
12995 if (TREE_CODE (arg0) == ADDR_EXPR
12996 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12997 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12998 && ! lookup_attribute ("alias",
12999 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
13000 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
13001 && TREE_CODE (arg1) == ADDR_EXPR
13002 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13003 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13004 && ! lookup_attribute ("alias",
13005 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13006 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13008 /* We know that we're looking at the address of two
13009 non-weak, unaliased, static _DECL nodes.
13011 It is both wasteful and incorrect to call operand_equal_p
13012 to compare the two ADDR_EXPR nodes. It is wasteful in that
13013 all we need to do is test pointer equality for the arguments
13014 to the two ADDR_EXPR nodes. It is incorrect to use
13015 operand_equal_p as that function is NOT equivalent to a
13016 C equality test. It can in fact return false for two
13017 objects which would test as equal using the C equality
13018 operator. */
13019 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13020 return constant_boolean_node (equal
13021 ? code == EQ_EXPR : code != EQ_EXPR,
13022 type);
13025 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13026 a MINUS_EXPR of a constant, we can convert it into a comparison with
13027 a revised constant as long as no overflow occurs. */
13028 if (TREE_CODE (arg1) == INTEGER_CST
13029 && (TREE_CODE (arg0) == PLUS_EXPR
13030 || TREE_CODE (arg0) == MINUS_EXPR)
13031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13032 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13033 ? MINUS_EXPR : PLUS_EXPR,
13034 fold_convert_loc (loc, TREE_TYPE (arg0),
13035 arg1),
13036 TREE_OPERAND (arg0, 1)))
13037 && !TREE_OVERFLOW (tem))
13038 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
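/* E.g. "x + 5 == 7" becomes "x == 2", provided computing the revised
   constant does not overflow.  */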
13040 /* Similarly for a NEGATE_EXPR. */
13041 if (TREE_CODE (arg0) == NEGATE_EXPR
13042 && TREE_CODE (arg1) == INTEGER_CST
13043 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13044 arg1)))
13045 && TREE_CODE (tem) == INTEGER_CST
13046 && !TREE_OVERFLOW (tem))
13047 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13049 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13050 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13051 && TREE_CODE (arg1) == INTEGER_CST
13052 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13053 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13054 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13055 fold_convert_loc (loc,
13056 TREE_TYPE (arg0),
13057 arg1),
13058 TREE_OPERAND (arg0, 1)));
13060 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13061 if ((TREE_CODE (arg0) == PLUS_EXPR
13062 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13063 || TREE_CODE (arg0) == MINUS_EXPR)
13064 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13065 0)),
13066 arg1, 0)
13067 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13068 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13070 tree val = TREE_OPERAND (arg0, 1);
13071 return omit_two_operands_loc (loc, type,
13072 fold_build2_loc (loc, code, type,
13073 val,
13074 build_int_cst (TREE_TYPE (val),
13075 0)),
13076 TREE_OPERAND (arg0, 0), arg1);
13079 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13080 if (TREE_CODE (arg0) == MINUS_EXPR
13081 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13082 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13083 1)),
13084 arg1, 0)
13085 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13087 return omit_two_operands_loc (loc, type,
13088 code == NE_EXPR
13089 ? boolean_true_node : boolean_false_node,
13090 TREE_OPERAND (arg0, 1), arg1);
13093 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13094 for !=. Don't do this for ordered comparisons due to overflow. */
13095 if (TREE_CODE (arg0) == MINUS_EXPR
13096 && integer_zerop (arg1))
13097 return fold_build2_loc (loc, code, type,
13098 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13100 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13101 if (TREE_CODE (arg0) == ABS_EXPR
13102 && (integer_zerop (arg1) || real_zerop (arg1)))
13103 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13105 /* If this is an EQ or NE comparison with zero and ARG0 is
13106 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13107 two operations, but the latter can be done in one less insn
13108 on machines that have only two-operand insns or on which a
13109 constant cannot be the first operand. */
13110 if (TREE_CODE (arg0) == BIT_AND_EXPR
13111 && integer_zerop (arg1))
13113 tree arg00 = TREE_OPERAND (arg0, 0);
13114 tree arg01 = TREE_OPERAND (arg0, 1);
13115 if (TREE_CODE (arg00) == LSHIFT_EXPR
13116 && integer_onep (TREE_OPERAND (arg00, 0)))
13118 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13119 arg01, TREE_OPERAND (arg00, 1));
13120 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13121 build_int_cst (TREE_TYPE (arg0), 1));
13122 return fold_build2_loc (loc, code, type,
13123 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13124 arg1);
13126 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13127 && integer_onep (TREE_OPERAND (arg01, 0)))
13129 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13130 arg00, TREE_OPERAND (arg01, 1));
13131 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13132 build_int_cst (TREE_TYPE (arg0), 1));
13133 return fold_build2_loc (loc, code, type,
13134 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13135 arg1);
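/* E.g. "((1 << n) & x) == 0" becomes "((x >> n) & 1) == 0", which
   needs no constant as the first operand of the shift.  */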
13139 /* If this is an NE or EQ comparison of zero against the result of a
13140 signed MOD operation whose second operand is a power of 2, make
13141 the MOD operation unsigned since it is simpler and equivalent. */
13142 if (integer_zerop (arg1)
13143 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13144 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13145 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13146 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13147 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13148 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13150 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13151 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13152 fold_convert_loc (loc, newtype,
13153 TREE_OPERAND (arg0, 0)),
13154 fold_convert_loc (loc, newtype,
13155 TREE_OPERAND (arg0, 1)));
13157 return fold_build2_loc (loc, code, type, newmod,
13158 fold_convert_loc (loc, newtype, arg1));
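/* E.g. for signed x, "x % 4 == 0" becomes "(unsigned) x % 4 == 0",
   and the unsigned modulus is then just a mask, with no
   sign-correction sequence.  */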
13161 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13162 C1 is a valid shift constant, and C2 is a power of two, i.e.
13163 a single bit. */
13164 if (TREE_CODE (arg0) == BIT_AND_EXPR
13165 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13166 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13167 == INTEGER_CST
13168 && integer_pow2p (TREE_OPERAND (arg0, 1))
13169 && integer_zerop (arg1))
13171 tree itype = TREE_TYPE (arg0);
13172 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13173 prec = TYPE_PRECISION (itype);
13175 /* Check for a valid shift count. */
13176 if (TREE_INT_CST_HIGH (arg001) == 0
13177 && TREE_INT_CST_LOW (arg001) < prec)
13179 tree arg01 = TREE_OPERAND (arg0, 1);
13180 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13181 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13182 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13183 can be rewritten as (X & (C2 << C1)) != 0. */
13184 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13186 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13187 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13188 return fold_build2_loc (loc, code, type, tem,
13189 fold_convert_loc (loc, itype, arg1));
13191 /* Otherwise, for signed (arithmetic) shifts,
13192 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13193 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13194 else if (!TYPE_UNSIGNED (itype))
13195 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13196 arg000, build_int_cst (itype, 0));
13197 /* Otherwise, for unsigned (logical) shifts,
13198 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13199 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13200 else
13201 return omit_one_operand_loc (loc, type,
13202 code == EQ_EXPR ? integer_one_node
13203 : integer_zero_node,
13204 arg000);
13208 /* If we have (A & C) == C where C is a power of 2, convert this into
13209 (A & C) != 0. Similarly for NE_EXPR. */
13210 if (TREE_CODE (arg0) == BIT_AND_EXPR
13211 && integer_pow2p (TREE_OPERAND (arg0, 1))
13212 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13213 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13214 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13215 integer_zero_node));
13217 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13218 bit, then fold the expression into A < 0 or A >= 0. */
13219 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13220 if (tem)
13221 return tem;
13223 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13224 Similarly for NE_EXPR. */
13225 if (TREE_CODE (arg0) == BIT_AND_EXPR
13226 && TREE_CODE (arg1) == INTEGER_CST
13227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13229 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13230 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13231 TREE_OPERAND (arg0, 1));
13232 tree dandnotc
13233 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13234 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13235 notc);
13236 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13237 if (integer_nonzerop (dandnotc))
13238 return omit_one_operand_loc (loc, type, rslt, arg0);
13241 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13242 Similarly for NE_EXPR. */
13243 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13244 && TREE_CODE (arg1) == INTEGER_CST
13245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13247 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13248 tree candnotd
13249 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13250 TREE_OPERAND (arg0, 1),
13251 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13252 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13253 if (integer_nonzerop (candnotd))
13254 return omit_one_operand_loc (loc, type, rslt, arg0);
13257 /* If this is a comparison of a field, we may be able to simplify it. */
13258 if ((TREE_CODE (arg0) == COMPONENT_REF
13259 || TREE_CODE (arg0) == BIT_FIELD_REF)
13260 /* Handle the constant case even without -O
13261 to make sure the warnings are given. */
13262 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13264 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13265 if (t1)
13266 return t1;
13269 /* Optimize comparisons of strlen vs zero to a compare of the
13270 first character of the string vs zero. To wit,
13271 strlen(ptr) == 0 => *ptr == 0
13272 strlen(ptr) != 0 => *ptr != 0
13273 Other cases should reduce to one of these two (or a constant)
13274 due to the return value of strlen being unsigned. */
13275 if (TREE_CODE (arg0) == CALL_EXPR
13276 && integer_zerop (arg1))
13278 tree fndecl = get_callee_fndecl (arg0);
13280 if (fndecl
13281 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13282 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13283 && call_expr_nargs (arg0) == 1
13284 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13286 tree iref = build_fold_indirect_ref_loc (loc,
13287 CALL_EXPR_ARG (arg0, 0));
13288 return fold_build2_loc (loc, code, type, iref,
13289 build_int_cst (TREE_TYPE (iref), 0));
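/* E.g. "strlen (s) == 0" becomes "*s == 0", avoiding the library call
   altogether.  */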
13293 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13294 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13295 if (TREE_CODE (arg0) == RSHIFT_EXPR
13296 && integer_zerop (arg1)
13297 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13299 tree arg00 = TREE_OPERAND (arg0, 0);
13300 tree arg01 = TREE_OPERAND (arg0, 1);
13301 tree itype = TREE_TYPE (arg00);
13302 if (TREE_INT_CST_HIGH (arg01) == 0
13303 && TREE_INT_CST_LOW (arg01)
13304 == (unsigned HOST_WIDE_INT) (element_precision (itype) - 1))
13306 if (TYPE_UNSIGNED (itype))
13308 itype = signed_type_for (itype);
13309 arg00 = fold_convert_loc (loc, itype, arg00);
13311 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13312 type, arg00, build_zero_cst (itype));
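/* E.g. for 32-bit int x, "(x >> 31) != 0" becomes "x < 0"; an
   unsigned operand is first converted to the corresponding signed
   type.  */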
13316 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13317 if (integer_zerop (arg1)
13318 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13319 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13320 TREE_OPERAND (arg0, 1));
13322 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13323 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13324 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13325 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13326 build_zero_cst (TREE_TYPE (arg0)));
13327 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13328 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13329 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13330 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13331 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13332 build_zero_cst (TREE_TYPE (arg0)));
13334 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13335 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13336 && TREE_CODE (arg1) == INTEGER_CST
13337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13338 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13339 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13340 TREE_OPERAND (arg0, 1), arg1));
13342 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13343 (X & C) == 0 when C is a single bit. */
13344 if (TREE_CODE (arg0) == BIT_AND_EXPR
13345 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13346 && integer_zerop (arg1)
13347 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13349 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13350 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13351 TREE_OPERAND (arg0, 1));
13352 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13353 type, tem,
13354 fold_convert_loc (loc, TREE_TYPE (arg0),
13355 arg1));
13358 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13359 constant C is a power of two, i.e. a single bit. */
13360 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13361 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13362 && integer_zerop (arg1)
13363 && integer_pow2p (TREE_OPERAND (arg0, 1))
13364 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13365 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13367 tree arg00 = TREE_OPERAND (arg0, 0);
13368 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13369 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13372 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13373 when C is a power of two, i.e. a single bit. */
13374 if (TREE_CODE (arg0) == BIT_AND_EXPR
13375 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13376 && integer_zerop (arg1)
13377 && integer_pow2p (TREE_OPERAND (arg0, 1))
13378 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13379 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13381 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13382 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13383 arg000, TREE_OPERAND (arg0, 1));
13384 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13385 tem, build_int_cst (TREE_TYPE (tem), 0));
13388 if (integer_zerop (arg1)
13389 && tree_expr_nonzero_p (arg0))
13391 tree res = constant_boolean_node (code == NE_EXPR, type);
13392 return omit_one_operand_loc (loc, type, res, arg0);
13395 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13396 if (TREE_CODE (arg0) == NEGATE_EXPR
13397 && TREE_CODE (arg1) == NEGATE_EXPR)
13398 return fold_build2_loc (loc, code, type,
13399 TREE_OPERAND (arg0, 0),
13400 fold_convert_loc (loc, TREE_TYPE (arg0),
13401 TREE_OPERAND (arg1, 0)));
13403 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13404 if (TREE_CODE (arg0) == BIT_AND_EXPR
13405 && TREE_CODE (arg1) == BIT_AND_EXPR)
13407 tree arg00 = TREE_OPERAND (arg0, 0);
13408 tree arg01 = TREE_OPERAND (arg0, 1);
13409 tree arg10 = TREE_OPERAND (arg1, 0);
13410 tree arg11 = TREE_OPERAND (arg1, 1);
13411 tree itype = TREE_TYPE (arg0);
13413 if (operand_equal_p (arg01, arg11, 0))
13414 return fold_build2_loc (loc, code, type,
13415 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13416 fold_build2_loc (loc,
13417 BIT_XOR_EXPR, itype,
13418 arg00, arg10),
13419 arg01),
13420 build_zero_cst (itype));
13422 if (operand_equal_p (arg01, arg10, 0))
13423 return fold_build2_loc (loc, code, type,
13424 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13425 fold_build2_loc (loc,
13426 BIT_XOR_EXPR, itype,
13427 arg00, arg11),
13428 arg01),
13429 build_zero_cst (itype));
13431 if (operand_equal_p (arg00, arg11, 0))
13432 return fold_build2_loc (loc, code, type,
13433 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13434 fold_build2_loc (loc,
13435 BIT_XOR_EXPR, itype,
13436 arg01, arg10),
13437 arg00),
13438 build_zero_cst (itype));
13440 if (operand_equal_p (arg00, arg10, 0))
13441 return fold_build2_loc (loc, code, type,
13442 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13443 fold_build2_loc (loc,
13444 BIT_XOR_EXPR, itype,
13445 arg01, arg11),
13446 arg00),
13447 build_zero_cst (itype));
13450 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13451 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13453 tree arg00 = TREE_OPERAND (arg0, 0);
13454 tree arg01 = TREE_OPERAND (arg0, 1);
13455 tree arg10 = TREE_OPERAND (arg1, 0);
13456 tree arg11 = TREE_OPERAND (arg1, 1);
13457 tree itype = TREE_TYPE (arg0);
13459 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13460 operand_equal_p guarantees no side-effects so we don't need
13461 to use omit_one_operand on Z. */
13462 if (operand_equal_p (arg01, arg11, 0))
13463 return fold_build2_loc (loc, code, type, arg00,
13464 fold_convert_loc (loc, TREE_TYPE (arg00),
13465 arg10));
13466 if (operand_equal_p (arg01, arg10, 0))
13467 return fold_build2_loc (loc, code, type, arg00,
13468 fold_convert_loc (loc, TREE_TYPE (arg00),
13469 arg11));
13470 if (operand_equal_p (arg00, arg11, 0))
13471 return fold_build2_loc (loc, code, type, arg01,
13472 fold_convert_loc (loc, TREE_TYPE (arg01),
13473 arg10));
13474 if (operand_equal_p (arg00, arg10, 0))
13475 return fold_build2_loc (loc, code, type, arg01,
13476 fold_convert_loc (loc, TREE_TYPE (arg01),
13477 arg11));
13479 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13480 if (TREE_CODE (arg01) == INTEGER_CST
13481 && TREE_CODE (arg11) == INTEGER_CST)
13483 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13484 fold_convert_loc (loc, itype, arg11));
13485 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13486 return fold_build2_loc (loc, code, type, tem,
13487 fold_convert_loc (loc, itype, arg10));
13491 /* Attempt to simplify equality/inequality comparisons of complex
13492 values. Only lower the comparison if the result is known or
13493 can be simplified to a single scalar comparison. */
13494 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13495 || TREE_CODE (arg0) == COMPLEX_CST)
13496 && (TREE_CODE (arg1) == COMPLEX_EXPR
13497 || TREE_CODE (arg1) == COMPLEX_CST))
13499 tree real0, imag0, real1, imag1;
13500 tree rcond, icond;
13502 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13504 real0 = TREE_OPERAND (arg0, 0);
13505 imag0 = TREE_OPERAND (arg0, 1);
13507 else
13509 real0 = TREE_REALPART (arg0);
13510 imag0 = TREE_IMAGPART (arg0);
13513 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13515 real1 = TREE_OPERAND (arg1, 0);
13516 imag1 = TREE_OPERAND (arg1, 1);
13518 else
13520 real1 = TREE_REALPART (arg1);
13521 imag1 = TREE_IMAGPART (arg1);
13524 rcond = fold_binary_loc (loc, code, type, real0, real1);
13525 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13527 if (integer_zerop (rcond))
13529 if (code == EQ_EXPR)
13530 return omit_two_operands_loc (loc, type, boolean_false_node,
13531 imag0, imag1);
13532 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13534 else
13536 if (code == NE_EXPR)
13537 return omit_two_operands_loc (loc, type, boolean_true_node,
13538 imag0, imag1);
13539 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13543 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13544 if (icond && TREE_CODE (icond) == INTEGER_CST)
13546 if (integer_zerop (icond))
13548 if (code == EQ_EXPR)
13549 return omit_two_operands_loc (loc, type, boolean_false_node,
13550 real0, real1);
13551 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13553 else
13555 if (code == NE_EXPR)
13556 return omit_two_operands_loc (loc, type, boolean_true_node,
13557 real0, real1);
13558 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13563 return NULL_TREE;
13565 case LT_EXPR:
13566 case GT_EXPR:
13567 case LE_EXPR:
13568 case GE_EXPR:
13569 tem = fold_comparison (loc, code, type, op0, op1);
13570 if (tem != NULL_TREE)
13571 return tem;
13573 /* Transform comparisons of the form X +- C CMP X. */
13574 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13575 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13576 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13577 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13578 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13579 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13581 tree arg01 = TREE_OPERAND (arg0, 1);
13582 enum tree_code code0 = TREE_CODE (arg0);
13583 int is_positive;
13585 if (TREE_CODE (arg01) == REAL_CST)
13586 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13587 else
13588 is_positive = tree_int_cst_sgn (arg01);
13590 /* (X - c) > X becomes false. */
13591 if (code == GT_EXPR
13592 && ((code0 == MINUS_EXPR && is_positive >= 0)
13593 || (code0 == PLUS_EXPR && is_positive <= 0)))
13595 if (TREE_CODE (arg01) == INTEGER_CST
13596 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13597 fold_overflow_warning (("assuming signed overflow does not "
13598 "occur when assuming that (X - c) > X "
13599 "is always false"),
13600 WARN_STRICT_OVERFLOW_ALL);
13601 return constant_boolean_node (0, type);
13604 /* Likewise (X + c) < X becomes false. */
13605 if (code == LT_EXPR
13606 && ((code0 == PLUS_EXPR && is_positive >= 0)
13607 || (code0 == MINUS_EXPR && is_positive <= 0)))
13609 if (TREE_CODE (arg01) == INTEGER_CST
13610 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13611 fold_overflow_warning (("assuming signed overflow does not "
13612 "occur when assuming that "
13613 "(X + c) < X is always false"),
13614 WARN_STRICT_OVERFLOW_ALL);
13615 return constant_boolean_node (0, type);
13618 /* Convert (X - c) <= X to true. */
13619 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13620 && code == LE_EXPR
13621 && ((code0 == MINUS_EXPR && is_positive >= 0)
13622 || (code0 == PLUS_EXPR && is_positive <= 0)))
13624 if (TREE_CODE (arg01) == INTEGER_CST
13625 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13626 fold_overflow_warning (("assuming signed overflow does not "
13627 "occur when assuming that "
13628 "(X - c) <= X is always true"),
13629 WARN_STRICT_OVERFLOW_ALL);
13630 return constant_boolean_node (1, type);
13633 /* Convert (X + c) >= X to true. */
13634 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13635 && code == GE_EXPR
13636 && ((code0 == PLUS_EXPR && is_positive >= 0)
13637 || (code0 == MINUS_EXPR && is_positive <= 0)))
13639 if (TREE_CODE (arg01) == INTEGER_CST
13640 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13641 fold_overflow_warning (("assuming signed overflow does not "
13642 "occur when assuming that "
13643 "(X + c) >= X is always true"),
13644 WARN_STRICT_OVERFLOW_ALL);
13645 return constant_boolean_node (1, type);
13648 if (TREE_CODE (arg01) == INTEGER_CST)
13650 /* Convert X + c > X and X - c < X to true for integers. */
13651 if (code == GT_EXPR
13652 && ((code0 == PLUS_EXPR && is_positive > 0)
13653 || (code0 == MINUS_EXPR && is_positive < 0)))
13655 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13656 fold_overflow_warning (("assuming signed overflow does "
13657 "not occur when assuming that "
13658 "(X + c) > X is always true"),
13659 WARN_STRICT_OVERFLOW_ALL);
13660 return constant_boolean_node (1, type);
13663 if (code == LT_EXPR
13664 && ((code0 == MINUS_EXPR && is_positive > 0)
13665 || (code0 == PLUS_EXPR && is_positive < 0)))
13667 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13668 fold_overflow_warning (("assuming signed overflow does "
13669 "not occur when assuming that "
13670 "(X - c) < X is always true"),
13671 WARN_STRICT_OVERFLOW_ALL);
13672 return constant_boolean_node (1, type);
13675 /* Convert X + c <= X and X - c >= X to false for integers. */
13676 if (code == LE_EXPR
13677 && ((code0 == PLUS_EXPR && is_positive > 0)
13678 || (code0 == MINUS_EXPR && is_positive < 0)))
13680 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13681 fold_overflow_warning (("assuming signed overflow does "
13682 "not occur when assuming that "
13683 "(X + c) <= X is always false"),
13684 WARN_STRICT_OVERFLOW_ALL);
13685 return constant_boolean_node (0, type);
13688 if (code == GE_EXPR
13689 && ((code0 == MINUS_EXPR && is_positive > 0)
13690 || (code0 == PLUS_EXPR && is_positive < 0)))
13692 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13693 fold_overflow_warning (("assuming signed overflow does "
13694 "not occur when assuming that "
13695 "(X - c) >= X is always false"),
13696 WARN_STRICT_OVERFLOW_ALL);
13697 return constant_boolean_node (0, type);
13702 /* Comparisons with the highest or lowest possible integer of
13703 the specified precision will have known values. */
13705 tree arg1_type = TREE_TYPE (arg1);
13706 unsigned int width = TYPE_PRECISION (arg1_type);
13708 if (TREE_CODE (arg1) == INTEGER_CST
13709 && width <= HOST_BITS_PER_DOUBLE_INT
13710 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13712 HOST_WIDE_INT signed_max_hi;
13713 unsigned HOST_WIDE_INT signed_max_lo;
13714 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13716 if (width <= HOST_BITS_PER_WIDE_INT)
13718 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13719 - 1;
13720 signed_max_hi = 0;
13721 max_hi = 0;
13723 if (TYPE_UNSIGNED (arg1_type))
13725 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13726 min_lo = 0;
13727 min_hi = 0;
13729 else
13731 max_lo = signed_max_lo;
13732 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13733 min_hi = -1;
13736 else
13738 width -= HOST_BITS_PER_WIDE_INT;
13739 signed_max_lo = -1;
13740 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13741 - 1;
13742 max_lo = -1;
13743 min_lo = 0;
13745 if (TYPE_UNSIGNED (arg1_type))
13747 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13748 min_hi = 0;
13750 else
13752 max_hi = signed_max_hi;
13753 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13757 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13758 && TREE_INT_CST_LOW (arg1) == max_lo)
13759 switch (code)
13761 case GT_EXPR:
13762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13764 case GE_EXPR:
13765 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13767 case LE_EXPR:
13768 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13770 case LT_EXPR:
13771 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13773 /* The GE_EXPR and LT_EXPR cases above are not normally
13774 reached because of previous transformations. */
13776 default:
13777 break;
13779 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13780 == max_hi
13781 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13782 switch (code)
13784 case GT_EXPR:
13785 arg1 = const_binop (PLUS_EXPR, arg1,
13786 build_int_cst (TREE_TYPE (arg1), 1));
13787 return fold_build2_loc (loc, EQ_EXPR, type,
13788 fold_convert_loc (loc,
13789 TREE_TYPE (arg1), arg0),
13790 arg1);
13791 case LE_EXPR:
13792 arg1 = const_binop (PLUS_EXPR, arg1,
13793 build_int_cst (TREE_TYPE (arg1), 1));
13794 return fold_build2_loc (loc, NE_EXPR, type,
13795 fold_convert_loc (loc, TREE_TYPE (arg1),
13796 arg0),
13797 arg1);
13798 default:
13799 break;
13801 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13802 == min_hi
13803 && TREE_INT_CST_LOW (arg1) == min_lo)
13804 switch (code)
13806 case LT_EXPR:
13807 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13809 case LE_EXPR:
13810 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13812 case GE_EXPR:
13813 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13815 case GT_EXPR:
13816 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13818 default:
13819 break;
13821 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13822 == min_hi
13823 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13824 switch (code)
13826 case GE_EXPR:
13827 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13828 return fold_build2_loc (loc, NE_EXPR, type,
13829 fold_convert_loc (loc,
13830 TREE_TYPE (arg1), arg0),
13831 arg1);
13832 case LT_EXPR:
13833 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13834 return fold_build2_loc (loc, EQ_EXPR, type,
13835 fold_convert_loc (loc, TREE_TYPE (arg1),
13836 arg0),
13837 arg1);
13838 default:
13839 break;
13842 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13843 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13844 && TYPE_UNSIGNED (arg1_type)
13845 /* We will flip the signedness of the comparison operator
13846 associated with the mode of arg1, so the sign bit is
13847 specified by this mode. Check that arg1 is the signed
13848 max associated with this sign bit. */
13849 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13850 /* signed_type does not work on pointer types. */
13851 && INTEGRAL_TYPE_P (arg1_type))
13853 /* The following case also applies to X < signed_max+1
13854 and X >= signed_max+1 because of previous transformations. */
13855 if (code == LE_EXPR || code == GT_EXPR)
13857 tree st = signed_type_for (arg1_type);
13858 return fold_build2_loc (loc,
13859 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13860 type, fold_convert_loc (loc, st, arg0),
13861 build_int_cst (st, 0));
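/* E.g. for 32-bit unsigned x, "x <= 0x7fffffff" becomes
   "(int) x >= 0", turning a compare against a large constant into a
   plain sign test.  */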
13867 /* If we are comparing an ABS_EXPR with a constant, we can
13868 convert all the cases into explicit comparisons, but they may
13869 well not be faster than doing the ABS and one comparison.
13870 But ABS (X) <= C is a range comparison, which becomes a subtraction
13871 and a comparison, and is probably faster. */
13872 if (code == LE_EXPR
13873 && TREE_CODE (arg1) == INTEGER_CST
13874 && TREE_CODE (arg0) == ABS_EXPR
13875 && ! TREE_SIDE_EFFECTS (arg0)
13876 && (0 != (tem = negate_expr (arg1)))
13877 && TREE_CODE (tem) == INTEGER_CST
13878 && !TREE_OVERFLOW (tem))
13879 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13880 build2 (GE_EXPR, type,
13881 TREE_OPERAND (arg0, 0), tem),
13882 build2 (LE_EXPR, type,
13883 TREE_OPERAND (arg0, 0), arg1));
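/* Editorial illustration (not part of the original sources):
   "abs (x) <= 5" becomes "x >= -5 && x <= 5", trading the absolute
   value computation for a simple range check. */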
13885 /* Convert ABS_EXPR<x> >= 0 to true. */
13886 strict_overflow_p = false;
13887 if (code == GE_EXPR
13888 && (integer_zerop (arg1)
13889 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13890 && real_zerop (arg1)))
13891 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13893 if (strict_overflow_p)
13894 fold_overflow_warning (("assuming signed overflow does not occur "
13895 "when simplifying comparison of "
13896 "absolute value and zero"),
13897 WARN_STRICT_OVERFLOW_CONDITIONAL);
13898 return omit_one_operand_loc (loc, type,
13899 constant_boolean_node (true, type),
13900 arg0);
13903 /* Convert ABS_EXPR<x> < 0 to false. */
13904 strict_overflow_p = false;
13905 if (code == LT_EXPR
13906 && (integer_zerop (arg1) || real_zerop (arg1))
13907 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13909 if (strict_overflow_p)
13910 fold_overflow_warning (("assuming signed overflow does not occur "
13911 "when simplifying comparison of "
13912 "absolute value and zero"),
13913 WARN_STRICT_OVERFLOW_CONDITIONAL);
13914 return omit_one_operand_loc (loc, type,
13915 constant_boolean_node (false, type),
13916 arg0);
13919 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13920 and similarly for >= into !=. */
13921 if ((code == LT_EXPR || code == GE_EXPR)
13922 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13923 && TREE_CODE (arg1) == LSHIFT_EXPR
13924 && integer_onep (TREE_OPERAND (arg1, 0)))
13925 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13926 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13927 TREE_OPERAND (arg1, 1)),
13928 build_zero_cst (TREE_TYPE (arg0)));
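/* Editorial illustration (not part of the original sources): for
   unsigned x, "x < (1 << y)" folds to "(x >> y) == 0" and
   "x >= (1 << y)" folds to "(x >> y) != 0". */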
13930 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13931 otherwise Y might be >= # of bits in X's type and thus e.g.
13932 (unsigned char) (1 << Y) for Y == 15 might be 0.
13933 If the cast is widening, then 1 << Y should have unsigned type,
13934 otherwise if Y is the number of bits in the signed shift type minus 1,
13935 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13936 might be 0xffffffff80000000. */
13937 if ((code == LT_EXPR || code == GE_EXPR)
13938 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13939 && CONVERT_EXPR_P (arg1)
13940 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13941 && (TYPE_PRECISION (TREE_TYPE (arg1))
13942 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13943 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13944 || (TYPE_PRECISION (TREE_TYPE (arg1))
13945 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13946 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13948 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13949 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13950 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13951 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13952 build_zero_cst (TREE_TYPE (arg0)));
13955 return NULL_TREE;
13957 case UNORDERED_EXPR:
13958 case ORDERED_EXPR:
13959 case UNLT_EXPR:
13960 case UNLE_EXPR:
13961 case UNGT_EXPR:
13962 case UNGE_EXPR:
13963 case UNEQ_EXPR:
13964 case LTGT_EXPR:
13965 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13967 t1 = fold_relational_const (code, type, arg0, arg1);
13968 if (t1 != NULL_TREE)
13969 return t1;
13972 /* If the first operand is NaN, the result is constant. */
13973 if (TREE_CODE (arg0) == REAL_CST
13974 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13975 && (code != LTGT_EXPR || ! flag_trapping_math))
13977 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13978 ? integer_zero_node
13979 : integer_one_node;
13980 return omit_one_operand_loc (loc, type, t1, arg1);
13983 /* If the second operand is NaN, the result is constant. */
13984 if (TREE_CODE (arg1) == REAL_CST
13985 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13986 && (code != LTGT_EXPR || ! flag_trapping_math))
13988 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13989 ? integer_zero_node
13990 : integer_one_node;
13991 return omit_one_operand_loc (loc, type, t1, arg0);
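/* Editorial illustration (not part of the original sources): with a
   constant NaN operand, "x UNLT NaN" folds to 1, while "x LTGT NaN"
   and "x ORDERED NaN" fold to 0 (LTGT only when trapping math is
   disabled). */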
13994 /* Simplify unordered comparison of something with itself. */
13995 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13996 && operand_equal_p (arg0, arg1, 0))
13997 return constant_boolean_node (1, type);
13999 if (code == LTGT_EXPR
14000 && !flag_trapping_math
14001 && operand_equal_p (arg0, arg1, 0))
14002 return constant_boolean_node (0, type);
14004 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
14006 tree targ0 = strip_float_extensions (arg0);
14007 tree targ1 = strip_float_extensions (arg1);
14008 tree newtype = TREE_TYPE (targ0);
14010 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14011 newtype = TREE_TYPE (targ1);
14013 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14014 return fold_build2_loc (loc, code, type,
14015 fold_convert_loc (loc, newtype, targ0),
14016 fold_convert_loc (loc, newtype, targ1));
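/* Editorial illustration (not part of the original sources): for
   float f1, f2, "(double) f1 < (double) f2" folds to "f1 < f2",
   since widening both operands cannot change the outcome. */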
14019 return NULL_TREE;
14021 case COMPOUND_EXPR:
14022 /* When pedantic, a compound expression can be neither an lvalue
14023 nor an integer constant expression. */
14024 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14025 return NULL_TREE;
14026 /* Don't let (0, 0) be a null pointer constant. */
14027 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14028 : fold_convert_loc (loc, type, arg1);
14029 return pedantic_non_lvalue_loc (loc, tem);
14031 case COMPLEX_EXPR:
14032 if ((TREE_CODE (arg0) == REAL_CST
14033 && TREE_CODE (arg1) == REAL_CST)
14034 || (TREE_CODE (arg0) == INTEGER_CST
14035 && TREE_CODE (arg1) == INTEGER_CST))
14036 return build_complex (type, arg0, arg1);
14037 if (TREE_CODE (arg0) == REALPART_EXPR
14038 && TREE_CODE (arg1) == IMAGPART_EXPR
14039 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14040 && operand_equal_p (TREE_OPERAND (arg0, 0),
14041 TREE_OPERAND (arg1, 0), 0))
14042 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14043 TREE_OPERAND (arg1, 0));
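/* Editorial illustration (not part of the original sources):
   COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>> folds back
   to z itself when the types agree. */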
14044 return NULL_TREE;
14046 case ASSERT_EXPR:
14047 /* An ASSERT_EXPR should never be passed to fold_binary. */
14048 gcc_unreachable ();
14050 case VEC_PACK_TRUNC_EXPR:
14051 case VEC_PACK_FIX_TRUNC_EXPR:
14053 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14054 tree *elts;
14056 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14057 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14058 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14059 return NULL_TREE;
14061 elts = XALLOCAVEC (tree, nelts);
14062 if (!vec_cst_ctor_to_array (arg0, elts)
14063 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14064 return NULL_TREE;
14066 for (i = 0; i < nelts; i++)
14068 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14069 ? NOP_EXPR : FIX_TRUNC_EXPR,
14070 TREE_TYPE (type), elts[i]);
14071 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14072 return NULL_TREE;
14075 return build_vector (type, elts);
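/* Editorial illustration (not part of the original sources): packing
   the constant V2SI vectors { 1, 2 } and { 3, 4 } with
   VEC_PACK_TRUNC_EXPR yields the V4HI constant { 1, 2, 3, 4 }. */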
14078 case VEC_WIDEN_MULT_LO_EXPR:
14079 case VEC_WIDEN_MULT_HI_EXPR:
14080 case VEC_WIDEN_MULT_EVEN_EXPR:
14081 case VEC_WIDEN_MULT_ODD_EXPR:
14083 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14084 unsigned int out, ofs, scale;
14085 tree *elts;
14087 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14088 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14089 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14090 return NULL_TREE;
14092 elts = XALLOCAVEC (tree, nelts * 4);
14093 if (!vec_cst_ctor_to_array (arg0, elts)
14094 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14095 return NULL_TREE;
14097 if (code == VEC_WIDEN_MULT_LO_EXPR)
14098 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14099 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14100 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14101 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14102 scale = 1, ofs = 0;
14103 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14104 scale = 1, ofs = 1;
14106 for (out = 0; out < nelts; out++)
14108 unsigned int in1 = (out << scale) + ofs;
14109 unsigned int in2 = in1 + nelts * 2;
14110 tree t1, t2;
14112 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14113 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14115 if (t1 == NULL_TREE || t2 == NULL_TREE)
14116 return NULL_TREE;
14117 elts[out] = const_binop (MULT_EXPR, t1, t2);
14118 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14119 return NULL_TREE;
14122 return build_vector (type, elts);
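/* Editorial illustration (not part of the original sources): for V4HI
   operands a and b, VEC_WIDEN_MULT_EVEN_EXPR builds the V2SI vector
   { a[0]*b[0], a[2]*b[2] } and VEC_WIDEN_MULT_ODD_EXPR builds
   { a[1]*b[1], a[3]*b[3] }, each product computed in the wider type. */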
14125 default:
14126 return NULL_TREE;
14127 } /* switch (code) */
14130 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14131 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14132 of GOTO_EXPR. */
14134 static tree
14135 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14137 switch (TREE_CODE (*tp))
14139 case LABEL_EXPR:
14140 return *tp;
14142 case GOTO_EXPR:
14143 *walk_subtrees = 0;
14145 /* ... fall through ... */
14147 default:
14148 return NULL_TREE;
14152 /* Return whether the sub-tree ST contains a label which is accessible from
14153 outside the sub-tree. */
14155 static bool
14156 contains_label_p (tree st)
14158 return
14159 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14162 /* Fold a ternary expression of code CODE and type TYPE with operands
14163 OP0, OP1, and OP2. Return the folded expression if folding is
14164 successful. Otherwise, return NULL_TREE. */
14166 tree
14167 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14168 tree op0, tree op1, tree op2)
14170 tree tem;
14171 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14172 enum tree_code_class kind = TREE_CODE_CLASS (code);
14174 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14175 && TREE_CODE_LENGTH (code) == 3);
14177 /* Strip any conversions that don't change the mode. This is safe
14178 for every expression, except for a comparison expression because
14179 its signedness is derived from its operands. So, in the latter
14180 case, only strip conversions that don't change the signedness.
14182 Note that this is done as an internal manipulation within the
14183 constant folder, in order to find the simplest representation of
14184 the arguments so that their form can be studied. In any case,
14185 the appropriate type conversions should be put back in the tree
14186 that will get out of the constant folder. */
14187 if (op0)
14189 arg0 = op0;
14190 STRIP_NOPS (arg0);
14193 if (op1)
14195 arg1 = op1;
14196 STRIP_NOPS (arg1);
14199 if (op2)
14201 arg2 = op2;
14202 STRIP_NOPS (arg2);
14205 switch (code)
14207 case COMPONENT_REF:
14208 if (TREE_CODE (arg0) == CONSTRUCTOR
14209 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14211 unsigned HOST_WIDE_INT idx;
14212 tree field, value;
14213 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14214 if (field == arg1)
14215 return value;
14217 return NULL_TREE;
14219 case COND_EXPR:
14220 case VEC_COND_EXPR:
14221 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14222 so all simple results must be passed through pedantic_non_lvalue. */
14223 if (TREE_CODE (arg0) == INTEGER_CST)
14225 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14226 tem = integer_zerop (arg0) ? op2 : op1;
14227 /* Only optimize constant conditions when the selected branch
14228 has the same type as the COND_EXPR. This avoids optimizing
14229 away "c ? x : throw", where the throw has a void type.
14230 Avoid throwing away an operand that contains a label. */
14231 if ((!TREE_SIDE_EFFECTS (unused_op)
14232 || !contains_label_p (unused_op))
14233 && (! VOID_TYPE_P (TREE_TYPE (tem))
14234 || VOID_TYPE_P (type)))
14235 return pedantic_non_lvalue_loc (loc, tem);
14236 return NULL_TREE;
14238 else if (TREE_CODE (arg0) == VECTOR_CST)
14240 if (integer_all_onesp (arg0))
14241 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14242 if (integer_zerop (arg0))
14243 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14245 if ((TREE_CODE (arg1) == VECTOR_CST
14246 || TREE_CODE (arg1) == CONSTRUCTOR)
14247 && (TREE_CODE (arg2) == VECTOR_CST
14248 || TREE_CODE (arg2) == CONSTRUCTOR))
14250 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14251 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14252 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14253 for (i = 0; i < nelts; i++)
14255 tree val = VECTOR_CST_ELT (arg0, i);
14256 if (integer_all_onesp (val))
14257 sel[i] = i;
14258 else if (integer_zerop (val))
14259 sel[i] = nelts + i;
14260 else /* Currently unreachable. */
14261 return NULL_TREE;
14263 tree t = fold_vec_perm (type, arg1, arg2, sel);
14264 if (t != NULL_TREE)
14265 return t;
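/* Editorial illustration (not part of the original sources): a
   constant V4SI mask of { -1, 0, -1, 0 } selects
   { a[0], b[1], a[2], b[3] }, i.e. the permutation { 0, 5, 2, 7 }
   of the concatenated operands. */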
14269 if (operand_equal_p (arg1, op2, 0))
14270 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14272 /* If we have A op B ? A : C, we may be able to convert this to a
14273 simpler expression, depending on the operation and the values
14274 of B and C. Signed zeros prevent all of these transformations,
14275 for reasons given above each one.
14277 Also try swapping the arguments and inverting the conditional. */
14278 if (COMPARISON_CLASS_P (arg0)
14279 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14280 arg1, TREE_OPERAND (arg0, 1))
14281 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14283 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14284 if (tem)
14285 return tem;
14288 if (COMPARISON_CLASS_P (arg0)
14289 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14290 op2,
14291 TREE_OPERAND (arg0, 1))
14292 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14294 location_t loc0 = expr_location_or (arg0, loc);
14295 tem = fold_invert_truthvalue (loc0, arg0);
14296 if (tem && COMPARISON_CLASS_P (tem))
14298 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14299 if (tem)
14300 return tem;
14304 /* If the second operand is simpler than the third, swap them
14305 since that produces better jump optimization results. */
14306 if (truth_value_p (TREE_CODE (arg0))
14307 && tree_swap_operands_p (op1, op2, false))
14309 location_t loc0 = expr_location_or (arg0, loc);
14310 /* See if this can be inverted. If it can't, possibly because
14311 it was a floating-point inequality comparison, don't do
14312 anything. */
14313 tem = fold_invert_truthvalue (loc0, arg0);
14314 if (tem)
14315 return fold_build3_loc (loc, code, type, tem, op2, op1);
14318 /* Convert A ? 1 : 0 to simply A. */
14319 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14320 : (integer_onep (op1)
14321 && !VECTOR_TYPE_P (type)))
14322 && integer_zerop (op2)
14323 /* If we try to convert OP0 to our type, the
14324 call to fold will try to move the conversion inside
14325 a COND, which will recurse. In that case, the COND_EXPR
14326 is probably the best choice, so leave it alone. */
14327 && type == TREE_TYPE (arg0))
14328 return pedantic_non_lvalue_loc (loc, arg0);
14330 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14331 over COND_EXPR in cases such as floating point comparisons. */
14332 if (integer_zerop (op1)
14333 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14334 : (integer_onep (op2)
14335 && !VECTOR_TYPE_P (type)))
14336 && truth_value_p (TREE_CODE (arg0)))
14337 return pedantic_non_lvalue_loc (loc,
14338 fold_convert_loc (loc, type,
14339 invert_truthvalue_loc (loc,
14340 arg0)));
14342 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14343 if (TREE_CODE (arg0) == LT_EXPR
14344 && integer_zerop (TREE_OPERAND (arg0, 1))
14345 && integer_zerop (op2)
14346 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14348 /* sign_bit_p looks through both zero and sign extensions,
14349 but for this optimization only sign extensions are
14350 usable. */
14351 tree tem2 = TREE_OPERAND (arg0, 0);
14352 while (tem != tem2)
14354 if (TREE_CODE (tem2) != NOP_EXPR
14355 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14357 tem = NULL_TREE;
14358 break;
14360 tem2 = TREE_OPERAND (tem2, 0);
14362 /* sign_bit_p only checks ARG1 bits within A's precision.
14363 If <sign bit of A> has wider type than A, bits outside
14364 of A's precision in <sign bit of A> need to be checked.
14365 If they are all 0, this optimization must be done
14366 in A's unsigned type; if they are all 1, in A's signed type;
14367 otherwise it can't be done. */
14368 if (tem
14369 && TYPE_PRECISION (TREE_TYPE (tem))
14370 < TYPE_PRECISION (TREE_TYPE (arg1))
14371 && TYPE_PRECISION (TREE_TYPE (tem))
14372 < TYPE_PRECISION (type))
14374 unsigned HOST_WIDE_INT mask_lo;
14375 HOST_WIDE_INT mask_hi;
14376 int inner_width, outer_width;
14377 tree tem_type;
14379 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14380 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14381 if (outer_width > TYPE_PRECISION (type))
14382 outer_width = TYPE_PRECISION (type);
14384 if (outer_width > HOST_BITS_PER_WIDE_INT)
14386 mask_hi = (HOST_WIDE_INT_M1U
14387 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14388 mask_lo = -1;
14390 else
14392 mask_hi = 0;
14393 mask_lo = (HOST_WIDE_INT_M1U
14394 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14396 if (inner_width > HOST_BITS_PER_WIDE_INT)
14398 mask_hi &= ~(HOST_WIDE_INT_M1U
14399 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14400 mask_lo = 0;
14402 else
14403 mask_lo &= ~(HOST_WIDE_INT_M1U
14404 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14406 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14407 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14409 tem_type = signed_type_for (TREE_TYPE (tem));
14410 tem = fold_convert_loc (loc, tem_type, tem);
14412 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14413 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14415 tem_type = unsigned_type_for (TREE_TYPE (tem));
14416 tem = fold_convert_loc (loc, tem_type, tem);
14418 else
14419 tem = NULL;
14422 if (tem)
14423 return
14424 fold_convert_loc (loc, type,
14425 fold_build2_loc (loc, BIT_AND_EXPR,
14426 TREE_TYPE (tem), tem,
14427 fold_convert_loc (loc,
14428 TREE_TYPE (tem),
14429 arg1)));
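/* Editorial illustration (not part of the original sources): for
   32-bit int x, "x < 0 ? 0x80000000 : 0" folds to "x & 0x80000000",
   selecting the sign bit directly. */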
14432 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14433 already handled above. */
14434 if (TREE_CODE (arg0) == BIT_AND_EXPR
14435 && integer_onep (TREE_OPERAND (arg0, 1))
14436 && integer_zerop (op2)
14437 && integer_pow2p (arg1))
14439 tree tem = TREE_OPERAND (arg0, 0);
14440 STRIP_NOPS (tem);
14441 if (TREE_CODE (tem) == RSHIFT_EXPR
14442 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14443 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14444 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14445 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14446 TREE_OPERAND (tem, 0), arg1);
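/* Editorial illustration (not part of the original sources):
   "(x >> 3) & 1 ? 8 : 0" folds to "x & 8", since testing bit 3 and
   then materializing 1 << 3 is the same as masking bit 3 directly. */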
14449 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14450 is probably obsolete because the first operand should be a
14451 truth value (that's why we have the two cases above), but let's
14452 leave it in until we can confirm this for all front-ends. */
14453 if (integer_zerop (op2)
14454 && TREE_CODE (arg0) == NE_EXPR
14455 && integer_zerop (TREE_OPERAND (arg0, 1))
14456 && integer_pow2p (arg1)
14457 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14458 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14459 arg1, OEP_ONLY_CONST))
14460 return pedantic_non_lvalue_loc (loc,
14461 fold_convert_loc (loc, type,
14462 TREE_OPERAND (arg0, 0)));
14464 /* Disable the transformations below for vectors, since
14465 fold_binary_op_with_conditional_arg may undo them immediately,
14466 yielding an infinite loop. */
14467 if (code == VEC_COND_EXPR)
14468 return NULL_TREE;
14470 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14471 if (integer_zerop (op2)
14472 && truth_value_p (TREE_CODE (arg0))
14473 && truth_value_p (TREE_CODE (arg1))
14474 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14475 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14476 : TRUTH_ANDIF_EXPR,
14477 type, fold_convert_loc (loc, type, arg0), arg1);
14479 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14480 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14481 && truth_value_p (TREE_CODE (arg0))
14482 && truth_value_p (TREE_CODE (arg1))
14483 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14485 location_t loc0 = expr_location_or (arg0, loc);
14486 /* Only perform transformation if ARG0 is easily inverted. */
14487 tem = fold_invert_truthvalue (loc0, arg0);
14488 if (tem)
14489 return fold_build2_loc (loc, code == VEC_COND_EXPR
14490 ? BIT_IOR_EXPR
14491 : TRUTH_ORIF_EXPR,
14492 type, fold_convert_loc (loc, type, tem),
14493 arg1);
14496 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14497 if (integer_zerop (arg1)
14498 && truth_value_p (TREE_CODE (arg0))
14499 && truth_value_p (TREE_CODE (op2))
14500 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14502 location_t loc0 = expr_location_or (arg0, loc);
14503 /* Only perform transformation if ARG0 is easily inverted. */
14504 tem = fold_invert_truthvalue (loc0, arg0);
14505 if (tem)
14506 return fold_build2_loc (loc, code == VEC_COND_EXPR
14507 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14508 type, fold_convert_loc (loc, type, tem),
14509 op2);
14512 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14513 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14514 && truth_value_p (TREE_CODE (arg0))
14515 && truth_value_p (TREE_CODE (op2))
14516 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14517 return fold_build2_loc (loc, code == VEC_COND_EXPR
14518 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14519 type, fold_convert_loc (loc, type, arg0), op2);
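/* Editorial illustration (not part of the original sources): the four
   conversions above, for truth values a and b:
     a ? b : 0  ->  a && b
     a ? b : 1  ->  !a || b
     a ? 0 : b  ->  !a && b
     a ? 1 : b  ->  a || b  */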
14521 return NULL_TREE;
14523 case CALL_EXPR:
14524 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14525 of fold_ternary on them. */
14526 gcc_unreachable ();
14528 case BIT_FIELD_REF:
14529 if ((TREE_CODE (arg0) == VECTOR_CST
14530 || (TREE_CODE (arg0) == CONSTRUCTOR
14531 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14532 && (type == TREE_TYPE (TREE_TYPE (arg0))
14533 || (TREE_CODE (type) == VECTOR_TYPE
14534 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14536 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14537 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14538 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14539 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14541 if (n != 0
14542 && (idx % width) == 0
14543 && (n % width) == 0
14544 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14546 idx = idx / width;
14547 n = n / width;
14549 if (TREE_CODE (arg0) == VECTOR_CST)
14551 if (n == 1)
14552 return VECTOR_CST_ELT (arg0, idx);
14554 tree *vals = XALLOCAVEC (tree, n);
14555 for (unsigned i = 0; i < n; ++i)
14556 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14557 return build_vector (type, vals);
14560 /* Constructor elements can be subvectors. */
14561 unsigned HOST_WIDE_INT k = 1;
14562 if (CONSTRUCTOR_NELTS (arg0) != 0)
14564 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14565 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14566 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14569 /* We keep an exact subset of the constructor elements. */
14570 if ((idx % k) == 0 && (n % k) == 0)
14572 if (CONSTRUCTOR_NELTS (arg0) == 0)
14573 return build_constructor (type, NULL);
14574 idx /= k;
14575 n /= k;
14576 if (n == 1)
14578 if (idx < CONSTRUCTOR_NELTS (arg0))
14579 return CONSTRUCTOR_ELT (arg0, idx)->value;
14580 return build_zero_cst (type);
14583 vec<constructor_elt, va_gc> *vals;
14584 vec_alloc (vals, n);
14585 for (unsigned i = 0;
14586 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14587 ++i)
14588 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14589 CONSTRUCTOR_ELT
14590 (arg0, idx + i)->value);
14591 return build_constructor (type, vals);
14593 /* The bitfield references a single constructor element. */
14594 else if (idx + n <= (idx / k + 1) * k)
14596 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14597 return build_zero_cst (type);
14598 else if (n == k)
14599 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14600 else
14601 return fold_build3_loc (loc, code, type,
14602 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14603 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14608 /* A bit-field-ref that referenced the full argument can be stripped. */
14609 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14610 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14611 && integer_zerop (op2))
14612 return fold_convert_loc (loc, type, arg0);
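/* Editorial illustration (not part of the original sources):
   BIT_FIELD_REF <x, 32, 0> of a 32-bit integer x reads the whole
   value and therefore folds to a plain conversion of x. */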
14614 /* On constants we can use native encode/interpret to constant
14615 fold (nearly) all BIT_FIELD_REFs. */
14616 if (CONSTANT_CLASS_P (arg0)
14617 && can_native_interpret_type_p (type)
14618 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14619 /* This limitation should not be necessary; we just need to
14620 round this up to the mode size. */
14621 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14622 /* Need bit-shifting of the buffer to relax the following. */
14623 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14625 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14626 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14627 unsigned HOST_WIDE_INT clen;
14628 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14629 /* ??? We cannot tell native_encode_expr to start at
14630 an arbitrary byte, so limit ourselves to a reasonable amount
14631 of work. */
14632 if (clen <= 4096)
14634 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14635 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14636 if (len > 0
14637 && len * BITS_PER_UNIT >= bitpos + bitsize)
14639 tree v = native_interpret_expr (type,
14640 b + bitpos / BITS_PER_UNIT,
14641 bitsize / BITS_PER_UNIT);
14642 if (v)
14643 return v;
14648 return NULL_TREE;
14650 case FMA_EXPR:
14651 /* For integers we can decompose the FMA if possible. */
14652 if (TREE_CODE (arg0) == INTEGER_CST
14653 && TREE_CODE (arg1) == INTEGER_CST)
14654 return fold_build2_loc (loc, PLUS_EXPR, type,
14655 const_binop (MULT_EXPR, arg0, arg1), arg2);
14656 if (integer_zerop (arg2))
14657 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
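/* Editorial illustration (not part of the original sources):
   FMA_EXPR <3, 4, z> folds to "12 + z", and FMA_EXPR <x, y, 0>
   folds to "x * y". */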
14659 return fold_fma (loc, type, arg0, arg1, arg2);
14661 case VEC_PERM_EXPR:
14662 if (TREE_CODE (arg2) == VECTOR_CST)
14664 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14665 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14666 tree t;
14667 bool need_mask_canon = false;
14668 bool all_in_vec0 = true;
14669 bool all_in_vec1 = true;
14670 bool maybe_identity = true;
14671 bool single_arg = (op0 == op1);
14672 bool changed = false;
14674 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14675 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14676 for (i = 0; i < nelts; i++)
14678 tree val = VECTOR_CST_ELT (arg2, i);
14679 if (TREE_CODE (val) != INTEGER_CST)
14680 return NULL_TREE;
14682 sel[i] = TREE_INT_CST_LOW (val) & mask;
14683 if (TREE_INT_CST_HIGH (val)
14684 || ((unsigned HOST_WIDE_INT)
14685 TREE_INT_CST_LOW (val) != sel[i]))
14686 need_mask_canon = true;
14688 if (sel[i] < nelts)
14689 all_in_vec1 = false;
14690 else
14691 all_in_vec0 = false;
14693 if ((sel[i] & (nelts-1)) != i)
14694 maybe_identity = false;
14697 if (maybe_identity)
14699 if (all_in_vec0)
14700 return op0;
14701 if (all_in_vec1)
14702 return op1;
14705 if (all_in_vec0)
14706 op1 = op0;
14707 else if (all_in_vec1)
14709 op0 = op1;
14710 for (i = 0; i < nelts; i++)
14711 sel[i] -= nelts;
14712 need_mask_canon = true;
14715 if ((TREE_CODE (op0) == VECTOR_CST
14716 || TREE_CODE (op0) == CONSTRUCTOR)
14717 && (TREE_CODE (op1) == VECTOR_CST
14718 || TREE_CODE (op1) == CONSTRUCTOR))
14720 t = fold_vec_perm (type, op0, op1, sel);
14721 if (t != NULL_TREE)
14722 return t;
14725 if (op0 == op1 && !single_arg)
14726 changed = true;
14728 if (need_mask_canon && arg2 == op2)
14730 tree *tsel = XALLOCAVEC (tree, nelts);
14731 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14732 for (i = 0; i < nelts; i++)
14733 tsel[i] = build_int_cst (eltype, sel[i]);
14734 op2 = build_vector (TREE_TYPE (arg2), tsel);
14735 changed = true;
14738 if (changed)
14739 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14741 return NULL_TREE;
14743 default:
14744 return NULL_TREE;
14745 } /* switch (code) */
14748 /* Perform constant folding and related simplification of EXPR.
14749 The related simplifications include x*1 => x, x*0 => 0, etc.,
14750 and application of the associative law.
14751 NOP_EXPR conversions may be removed freely (as long as we
14752 are careful not to change the type of the overall expression).
14753 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14754 but we can constant-fold them if they have constant operands. */
14756 #ifdef ENABLE_FOLD_CHECKING
14757 # define fold(x) fold_1 (x)
14758 static tree fold_1 (tree);
14759 static
14760 #endif
14761 tree
14762 fold (tree expr)
14764 const tree t = expr;
14765 enum tree_code code = TREE_CODE (t);
14766 enum tree_code_class kind = TREE_CODE_CLASS (code);
14767 tree tem;
14768 location_t loc = EXPR_LOCATION (expr);
14770 /* Return right away if a constant. */
14771 if (kind == tcc_constant)
14772 return t;
14774 /* CALL_EXPR-like objects with variable numbers of operands are
14775 treated specially. */
14776 if (kind == tcc_vl_exp)
14778 if (code == CALL_EXPR)
14780 tem = fold_call_expr (loc, expr, false);
14781 return tem ? tem : expr;
14783 return expr;
14786 if (IS_EXPR_CODE_CLASS (kind))
14788 tree type = TREE_TYPE (t);
14789 tree op0, op1, op2;
14791 switch (TREE_CODE_LENGTH (code))
14793 case 1:
14794 op0 = TREE_OPERAND (t, 0);
14795 tem = fold_unary_loc (loc, code, type, op0);
14796 return tem ? tem : expr;
14797 case 2:
14798 op0 = TREE_OPERAND (t, 0);
14799 op1 = TREE_OPERAND (t, 1);
14800 tem = fold_binary_loc (loc, code, type, op0, op1);
14801 return tem ? tem : expr;
14802 case 3:
14803 op0 = TREE_OPERAND (t, 0);
14804 op1 = TREE_OPERAND (t, 1);
14805 op2 = TREE_OPERAND (t, 2);
14806 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14807 return tem ? tem : expr;
14808 default:
14809 break;
14813 switch (code)
14815 case ARRAY_REF:
14817 tree op0 = TREE_OPERAND (t, 0);
14818 tree op1 = TREE_OPERAND (t, 1);
14820 if (TREE_CODE (op1) == INTEGER_CST
14821 && TREE_CODE (op0) == CONSTRUCTOR
14822 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14824 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14825 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14826 unsigned HOST_WIDE_INT begin = 0;
14828 /* Find a matching index by means of a binary search. */
14829 while (begin != end)
14831 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14832 tree index = (*elts)[middle].index;
14834 if (TREE_CODE (index) == INTEGER_CST
14835 && tree_int_cst_lt (index, op1))
14836 begin = middle + 1;
14837 else if (TREE_CODE (index) == INTEGER_CST
14838 && tree_int_cst_lt (op1, index))
14839 end = middle;
14840 else if (TREE_CODE (index) == RANGE_EXPR
14841 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14842 begin = middle + 1;
14843 else if (TREE_CODE (index) == RANGE_EXPR
14844 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14845 end = middle;
14846 else
14847 return (*elts)[middle].value;
14851 return t;
14854 /* Return a VECTOR_CST if possible. */
14855 case CONSTRUCTOR:
14857 tree type = TREE_TYPE (t);
14858 if (TREE_CODE (type) != VECTOR_TYPE)
14859 return t;
14861 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14862 unsigned HOST_WIDE_INT idx, pos = 0;
14863 tree value;
14865 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14867 if (!CONSTANT_CLASS_P (value))
14868 return t;
14869 if (TREE_CODE (value) == VECTOR_CST)
14871 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14872 vec[pos++] = VECTOR_CST_ELT (value, i);
14874 else
14875 vec[pos++] = value;
14877 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14878 vec[pos] = build_zero_cst (TREE_TYPE (type));
14880 return build_vector (type, vec);
14883 case CONST_DECL:
14884 return fold (DECL_INITIAL (t));
14886 default:
14887 return t;
14888 } /* switch (code) */
14891 #ifdef ENABLE_FOLD_CHECKING
14892 #undef fold
14894 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14895 hash_table <pointer_hash <tree_node> >);
14896 static void fold_check_failed (const_tree, const_tree);
14897 void print_fold_checksum (const_tree);
14899 /* When --enable-checking=fold is given, compute a digest of EXPR
14900 before and after the actual fold call to verify that fold did not
14901 accidentally change the original expr. */
14903 tree
14904 fold (tree expr)
14906 tree ret;
14907 struct md5_ctx ctx;
14908 unsigned char checksum_before[16], checksum_after[16];
14909 hash_table <pointer_hash <tree_node> > ht;
14911 ht.create (32);
14912 md5_init_ctx (&ctx);
14913 fold_checksum_tree (expr, &ctx, ht);
14914 md5_finish_ctx (&ctx, checksum_before);
14915 ht.empty ();
14917 ret = fold_1 (expr);
14919 md5_init_ctx (&ctx);
14920 fold_checksum_tree (expr, &ctx, ht);
14921 md5_finish_ctx (&ctx, checksum_after);
14922 ht.dispose ();
14924 if (memcmp (checksum_before, checksum_after, 16))
14925 fold_check_failed (expr, ret);
14927 return ret;
14930 void
14931 print_fold_checksum (const_tree expr)
14933 struct md5_ctx ctx;
14934 unsigned char checksum[16], cnt;
14935 hash_table <pointer_hash <tree_node> > ht;
14937 ht.create (32);
14938 md5_init_ctx (&ctx);
14939 fold_checksum_tree (expr, &ctx, ht);
14940 md5_finish_ctx (&ctx, checksum);
14941 ht.dispose ();
14942 for (cnt = 0; cnt < 16; ++cnt)
14943 fprintf (stderr, "%02x", checksum[cnt]);
14944 putc ('\n', stderr);
14947 static void
14948 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14950 internal_error ("fold check: original tree changed by fold");
14953 static void
14954 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14955 hash_table <pointer_hash <tree_node> > ht)
14957 tree_node **slot;
14958 enum tree_code code;
14959 union tree_node buf;
14960 int i, len;
14962 recursive_label:
14963 if (expr == NULL)
14964 return;
14965 slot = ht.find_slot (expr, INSERT);
14966 if (*slot != NULL)
14967 return;
14968 *slot = CONST_CAST_TREE (expr);
14969 code = TREE_CODE (expr);
14970 if (TREE_CODE_CLASS (code) == tcc_declaration
14971 && DECL_ASSEMBLER_NAME_SET_P (expr))
14973 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14974 memcpy ((char *) &buf, expr, tree_size (expr));
14975 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14976 expr = (tree) &buf;
14978 else if (TREE_CODE_CLASS (code) == tcc_type
14979 && (TYPE_POINTER_TO (expr)
14980 || TYPE_REFERENCE_TO (expr)
14981 || TYPE_CACHED_VALUES_P (expr)
14982 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14983 || TYPE_NEXT_VARIANT (expr)))
14985 /* Allow these fields to be modified. */
14986 tree tmp;
14987 memcpy ((char *) &buf, expr, tree_size (expr));
14988 expr = tmp = (tree) &buf;
14989 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14990 TYPE_POINTER_TO (tmp) = NULL;
14991 TYPE_REFERENCE_TO (tmp) = NULL;
14992 TYPE_NEXT_VARIANT (tmp) = NULL;
14993 if (TYPE_CACHED_VALUES_P (tmp))
14995 TYPE_CACHED_VALUES_P (tmp) = 0;
14996 TYPE_CACHED_VALUES (tmp) = NULL;
14999 md5_process_bytes (expr, tree_size (expr), ctx);
15000 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
15001 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
15002 if (TREE_CODE_CLASS (code) != tcc_type
15003 && TREE_CODE_CLASS (code) != tcc_declaration
15004 && code != TREE_LIST
15005 && code != SSA_NAME
15006 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
15007 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
15008 switch (TREE_CODE_CLASS (code))
15010 case tcc_constant:
15011 switch (code)
15013 case STRING_CST:
15014 md5_process_bytes (TREE_STRING_POINTER (expr),
15015 TREE_STRING_LENGTH (expr), ctx);
15016 break;
15017 case COMPLEX_CST:
15018 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
15019 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
15020 break;
15021 case VECTOR_CST:
15022 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15023 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15024 break;
15025 default:
15026 break;
15028 break;
15029 case tcc_exceptional:
15030 switch (code)
15032 case TREE_LIST:
15033 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15034 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15035 expr = TREE_CHAIN (expr);
15036 goto recursive_label;
15037 break;
15038 case TREE_VEC:
15039 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15040 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15041 break;
15042 default:
15043 break;
15045 break;
15046 case tcc_expression:
15047 case tcc_reference:
15048 case tcc_comparison:
15049 case tcc_unary:
15050 case tcc_binary:
15051 case tcc_statement:
15052 case tcc_vl_exp:
15053 len = TREE_OPERAND_LENGTH (expr);
15054 for (i = 0; i < len; ++i)
15055 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15056 break;
15057 case tcc_declaration:
15058 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15059 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15060 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15062 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15063 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15064 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15065 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15066 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15068 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15069 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15071 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15073 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15074 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15075 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15077 break;
15078 case tcc_type:
15079 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15080 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15081 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15082 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15083 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15084 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15085 if (INTEGRAL_TYPE_P (expr)
15086 || SCALAR_FLOAT_TYPE_P (expr))
15088 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15089 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15091 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15092 if (TREE_CODE (expr) == RECORD_TYPE
15093 || TREE_CODE (expr) == UNION_TYPE
15094 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15095 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15096 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15097 break;
15098 default:
15099 break;
15103 /* Helper function for outputting the checksum of a tree T. When
15104 debugging with gdb, you can "define mynext" to be "next" followed
15105 by "call debug_fold_checksum (op0)", then just trace down till the
15106 outputs differ. */
15108 DEBUG_FUNCTION void
15109 debug_fold_checksum (const_tree t)
15111 int i;
15112 unsigned char checksum[16];
15113 struct md5_ctx ctx;
15114 hash_table <pointer_hash <tree_node> > ht;
15115 ht.create (32);
15117 md5_init_ctx (&ctx);
15118 fold_checksum_tree (t, &ctx, ht);
15119 md5_finish_ctx (&ctx, checksum);
15120 ht.empty ();
15122 for (i = 0; i < 16; i++)
15123 fprintf (stderr, "%d ", checksum[i]);
15125 fprintf (stderr, "\n");
15128 #endif
15130 /* Fold a unary tree expression with code CODE of type TYPE with an
15131 operand OP0. LOC is the location of the resulting expression.
15132 Return a folded expression if successful. Otherwise, return a tree
15133 expression with code CODE of type TYPE with an operand OP0. */
15135 tree
15136 fold_build1_stat_loc (location_t loc,
15137 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15139 tree tem;
15140 #ifdef ENABLE_FOLD_CHECKING
15141 unsigned char checksum_before[16], checksum_after[16];
15142 struct md5_ctx ctx;
15143 hash_table <pointer_hash <tree_node> > ht;
15145 ht.create (32);
15146 md5_init_ctx (&ctx);
15147 fold_checksum_tree (op0, &ctx, ht);
15148 md5_finish_ctx (&ctx, checksum_before);
15149 ht.empty ();
15150 #endif
15152 tem = fold_unary_loc (loc, code, type, op0);
15153 if (!tem)
15154 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15156 #ifdef ENABLE_FOLD_CHECKING
15157 md5_init_ctx (&ctx);
15158 fold_checksum_tree (op0, &ctx, ht);
15159 md5_finish_ctx (&ctx, checksum_after);
15160 ht.dispose ();
15162 if (memcmp (checksum_before, checksum_after, 16))
15163 fold_check_failed (op0, tem);
15164 #endif
15165 return tem;
15168 /* Fold a binary tree expression with code CODE of type TYPE with
15169 operands OP0 and OP1. LOC is the location of the resulting
15170 expression. Return a folded expression if successful. Otherwise,
15171 return a tree expression with code CODE of type TYPE with operands
15172 OP0 and OP1. */
15174 tree
15175 fold_build2_stat_loc (location_t loc,
15176 enum tree_code code, tree type, tree op0, tree op1
15177 MEM_STAT_DECL)
15179 tree tem;
15180 #ifdef ENABLE_FOLD_CHECKING
15181 unsigned char checksum_before_op0[16],
15182 checksum_before_op1[16],
15183 checksum_after_op0[16],
15184 checksum_after_op1[16];
15185 struct md5_ctx ctx;
15186 hash_table <pointer_hash <tree_node> > ht;
15188 ht.create (32);
15189 md5_init_ctx (&ctx);
15190 fold_checksum_tree (op0, &ctx, ht);
15191 md5_finish_ctx (&ctx, checksum_before_op0);
15192 ht.empty ();
15194 md5_init_ctx (&ctx);
15195 fold_checksum_tree (op1, &ctx, ht);
15196 md5_finish_ctx (&ctx, checksum_before_op1);
15197 ht.empty ();
15198 #endif
15200 tem = fold_binary_loc (loc, code, type, op0, op1);
15201 if (!tem)
15202 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15204 #ifdef ENABLE_FOLD_CHECKING
15205 md5_init_ctx (&ctx);
15206 fold_checksum_tree (op0, &ctx, ht);
15207 md5_finish_ctx (&ctx, checksum_after_op0);
15208 ht.empty ();
15210 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15211 fold_check_failed (op0, tem);
15213 md5_init_ctx (&ctx);
15214 fold_checksum_tree (op1, &ctx, ht);
15215 md5_finish_ctx (&ctx, checksum_after_op1);
15216 ht.dispose ();
15218 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15219 fold_check_failed (op1, tem);
15220 #endif
15221 return tem;
15224 /* Fold a ternary tree expression with code CODE of type TYPE with
15225 operands OP0, OP1, and OP2. Return a folded expression if
15226 successful. Otherwise, return a tree expression with code CODE of
15227 type TYPE with operands OP0, OP1, and OP2. */
15229 tree
15230 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15231 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15233 tree tem;
15234 #ifdef ENABLE_FOLD_CHECKING
15235 unsigned char checksum_before_op0[16],
15236 checksum_before_op1[16],
15237 checksum_before_op2[16],
15238 checksum_after_op0[16],
15239 checksum_after_op1[16],
15240 checksum_after_op2[16];
15241 struct md5_ctx ctx;
15242 hash_table <pointer_hash <tree_node> > ht;
15244 ht.create (32);
15245 md5_init_ctx (&ctx);
15246 fold_checksum_tree (op0, &ctx, ht);
15247 md5_finish_ctx (&ctx, checksum_before_op0);
15248 ht.empty ();
15250 md5_init_ctx (&ctx);
15251 fold_checksum_tree (op1, &ctx, ht);
15252 md5_finish_ctx (&ctx, checksum_before_op1);
15253 ht.empty ();
15255 md5_init_ctx (&ctx);
15256 fold_checksum_tree (op2, &ctx, ht);
15257 md5_finish_ctx (&ctx, checksum_before_op2);
15258 ht.empty ();
15259 #endif
15261 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15262 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15263 if (!tem)
15264 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15266 #ifdef ENABLE_FOLD_CHECKING
15267 md5_init_ctx (&ctx);
15268 fold_checksum_tree (op0, &ctx, ht);
15269 md5_finish_ctx (&ctx, checksum_after_op0);
15270 ht.empty ();
15272 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15273 fold_check_failed (op0, tem);
15275 md5_init_ctx (&ctx);
15276 fold_checksum_tree (op1, &ctx, ht);
15277 md5_finish_ctx (&ctx, checksum_after_op1);
15278 ht.empty ();
15280 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15281 fold_check_failed (op1, tem);
15283 md5_init_ctx (&ctx);
15284 fold_checksum_tree (op2, &ctx, ht);
15285 md5_finish_ctx (&ctx, checksum_after_op2);
15286 ht.dispose ();
15288 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15289 fold_check_failed (op2, tem);
15290 #endif
15291 return tem;
15294 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
15295 arguments in ARGARRAY, and a null static chain.
15296 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15297 of type TYPE from the given operands as constructed by build_call_array. */
15299 tree
15300 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15301 int nargs, tree *argarray)
15303 tree tem;
15304 #ifdef ENABLE_FOLD_CHECKING
15305 unsigned char checksum_before_fn[16],
15306 checksum_before_arglist[16],
15307 checksum_after_fn[16],
15308 checksum_after_arglist[16];
15309 struct md5_ctx ctx;
15310 hash_table <pointer_hash <tree_node> > ht;
15311 int i;
15313 ht.create (32);
15314 md5_init_ctx (&ctx);
15315 fold_checksum_tree (fn, &ctx, ht);
15316 md5_finish_ctx (&ctx, checksum_before_fn);
15317 ht.empty ();
15319 md5_init_ctx (&ctx);
15320 for (i = 0; i < nargs; i++)
15321 fold_checksum_tree (argarray[i], &ctx, ht);
15322 md5_finish_ctx (&ctx, checksum_before_arglist);
15323 ht.empty ();
15324 #endif
15326 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15328 #ifdef ENABLE_FOLD_CHECKING
15329 md5_init_ctx (&ctx);
15330 fold_checksum_tree (fn, &ctx, ht);
15331 md5_finish_ctx (&ctx, checksum_after_fn);
15332 ht.empty ();
15334 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15335 fold_check_failed (fn, tem);
15337 md5_init_ctx (&ctx);
15338 for (i = 0; i < nargs; i++)
15339 fold_checksum_tree (argarray[i], &ctx, ht);
15340 md5_finish_ctx (&ctx, checksum_after_arglist);
15341 ht.dispose ();
15343 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15344 fold_check_failed (NULL_TREE, tem);
15345 #endif
15346 return tem;
15349 /* Perform constant folding and related simplification of initializer
15350 expression EXPR. These behave identically to "fold_buildN" but ignore
15351 potential run-time traps and exceptions that fold must preserve. */
15353 #define START_FOLD_INIT \
15354 int saved_signaling_nans = flag_signaling_nans;\
15355 int saved_trapping_math = flag_trapping_math;\
15356 int saved_rounding_math = flag_rounding_math;\
15357 int saved_trapv = flag_trapv;\
15358 int saved_folding_initializer = folding_initializer;\
15359 flag_signaling_nans = 0;\
15360 flag_trapping_math = 0;\
15361 flag_rounding_math = 0;\
15362 flag_trapv = 0;\
15363 folding_initializer = 1;
15365 #define END_FOLD_INIT \
15366 flag_signaling_nans = saved_signaling_nans;\
15367 flag_trapping_math = saved_trapping_math;\
15368 flag_rounding_math = saved_rounding_math;\
15369 flag_trapv = saved_trapv;\
15370 folding_initializer = saved_folding_initializer;
15372 tree
15373 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15374 tree type, tree op)
15376 tree result;
15377 START_FOLD_INIT;
15379 result = fold_build1_loc (loc, code, type, op);
15381 END_FOLD_INIT;
15382 return result;
15385 tree
15386 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15387 tree type, tree op0, tree op1)
15389 tree result;
15390 START_FOLD_INIT;
15392 result = fold_build2_loc (loc, code, type, op0, op1);
15394 END_FOLD_INIT;
15395 return result;
15398 tree
15399 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15400 int nargs, tree *argarray)
15402 tree result;
15403 START_FOLD_INIT;
15405 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15407 END_FOLD_INIT;
15408 return result;
15411 #undef START_FOLD_INIT
15412 #undef END_FOLD_INIT
15414 /* Determine whether the first argument is a multiple of the second
15415 argument. Return 0 if it is not, or if we cannot easily determine that it is.
15417 An example of the sort of thing we care about (at this point; this routine
15418 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15419 fold cases do now) is discovering that
15421 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15423 is a multiple of
15425 SAVE_EXPR (J * 8)
15427 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15429 This code also handles discovering that
15431 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15433 is a multiple of 8 so we don't have to worry about dealing with a
15434 possible remainder.
15436 Note that we *look* inside a SAVE_EXPR only to determine how it was
15437 calculated; it is not safe for fold to do much of anything else with the
15438 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15439 at run time. For example, the latter example above *cannot* be implemented
15440 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15441 evaluation time of the original SAVE_EXPR is not necessarily the same at
15442 the time the new expression is evaluated. The only optimization of this
15443 sort that would be valid is changing
15445 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15447 divided by 8 to
15449 SAVE_EXPR (I) * SAVE_EXPR (J)
15451 (where the same SAVE_EXPR (J) is used in the original and the
15452 transformed version). */
15454 int
15455 multiple_of_p (tree type, const_tree top, const_tree bottom)
15457 if (operand_equal_p (top, bottom, 0))
15458 return 1;
15460 if (TREE_CODE (type) != INTEGER_TYPE)
15461 return 0;
15463 switch (TREE_CODE (top))
15465 case BIT_AND_EXPR:
15466 /* A bitwise AND can only clear bits, so if BOTTOM is a power of
15467 two and either operand is a multiple of BOTTOM, TOP is too. */
15468 if (!integer_pow2p (bottom))
15469 return 0;
15470 /* FALLTHRU */
15472 case MULT_EXPR:
15473 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15474 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15476 case PLUS_EXPR:
15477 case MINUS_EXPR:
15478 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15479 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15481 case LSHIFT_EXPR:
15482 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15484 tree op1, t1;
15486 op1 = TREE_OPERAND (top, 1);
15487 /* const_binop may not detect overflow correctly,
15488 so check for it explicitly here. */
15489 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15490 > TREE_INT_CST_LOW (op1)
15491 && TREE_INT_CST_HIGH (op1) == 0
15492 && 0 != (t1 = fold_convert (type,
15493 const_binop (LSHIFT_EXPR,
15494 size_one_node,
15495 op1)))
15496 && !TREE_OVERFLOW (t1))
15497 return multiple_of_p (type, t1, bottom);
15499 return 0;
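/* Editorial illustration (not part of the original sources): by the
   LSHIFT_EXPR rule above, "i << 3" is a multiple of 8, since
   1 << 3 == 8. */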
15501 case NOP_EXPR:
15502 /* Can't handle conversions from non-integral or wider integral type. */
15503 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15504 || (TYPE_PRECISION (type)
15505 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15506 return 0;
15508 /* ... fall through ... */
15510 case SAVE_EXPR:
15511 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15513 case COND_EXPR:
15514 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15515 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15517 case INTEGER_CST:
15518 if (TREE_CODE (bottom) != INTEGER_CST
15519 || integer_zerop (bottom)
15520 || (TYPE_UNSIGNED (type)
15521 && (tree_int_cst_sgn (top) < 0
15522 || tree_int_cst_sgn (bottom) < 0)))
15523 return 0;
15524 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15525 top, bottom));
15527 default:
15528 return 0;
15532 /* Return true if CODE or TYPE is known to be non-negative. */
15534 static bool
15535 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15537 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15538 && truth_value_p (code))
15539 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15540 have a signed:1 type (where the values are -1 and 0). */
15541 return true;
15542 return false;
15545 /* Return true if (CODE OP0) is known to be non-negative. If the return
15546 value is based on the assumption that signed overflow is undefined,
15547 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15548 *STRICT_OVERFLOW_P. */
15550 bool
15551 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15552 bool *strict_overflow_p)
15554 if (TYPE_UNSIGNED (type))
15555 return true;
15557 switch (code)
15559 case ABS_EXPR:
15560 /* We can't return 1 if flag_wrapv is set because
15561 ABS_EXPR<INT_MIN> = INT_MIN. */
15562 if (!INTEGRAL_TYPE_P (type))
15563 return true;
15564 if (TYPE_OVERFLOW_UNDEFINED (type))
15566 *strict_overflow_p = true;
15567 return true;
15569 break;
15571 case NON_LVALUE_EXPR:
15572 case FLOAT_EXPR:
15573 case FIX_TRUNC_EXPR:
15574 return tree_expr_nonnegative_warnv_p (op0,
15575 strict_overflow_p);
15577 case NOP_EXPR:
15579 tree inner_type = TREE_TYPE (op0);
15580 tree outer_type = type;
15582 if (TREE_CODE (outer_type) == REAL_TYPE)
15584 if (TREE_CODE (inner_type) == REAL_TYPE)
15585 return tree_expr_nonnegative_warnv_p (op0,
15586 strict_overflow_p);
15587 if (INTEGRAL_TYPE_P (inner_type))
15589 if (TYPE_UNSIGNED (inner_type))
15590 return true;
15591 return tree_expr_nonnegative_warnv_p (op0,
15592 strict_overflow_p);
15595 else if (INTEGRAL_TYPE_P (outer_type))
15597 if (TREE_CODE (inner_type) == REAL_TYPE)
15598 return tree_expr_nonnegative_warnv_p (op0,
15599 strict_overflow_p);
15600 if (INTEGRAL_TYPE_P (inner_type))
15601 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15602 && TYPE_UNSIGNED (inner_type);
15605 break;
15607 default:
15608 return tree_simple_nonnegative_warnv_p (code, type);
15611 /* We don't know the sign of `t', so be conservative and return false. */
15612 return false;
15615 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15616 value is based on the assumption that signed overflow is undefined,
15617 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15618 *STRICT_OVERFLOW_P. */
15620 bool
15621 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15622 tree op1, bool *strict_overflow_p)
15624 if (TYPE_UNSIGNED (type))
15625 return true;
15627 switch (code)
15629 case POINTER_PLUS_EXPR:
15630 case PLUS_EXPR:
15631 if (FLOAT_TYPE_P (type))
15632 return (tree_expr_nonnegative_warnv_p (op0,
15633 strict_overflow_p)
15634 && tree_expr_nonnegative_warnv_p (op1,
15635 strict_overflow_p));
15637 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15638 both unsigned and at least 2 bits shorter than the result. */
15639 if (TREE_CODE (type) == INTEGER_TYPE
15640 && TREE_CODE (op0) == NOP_EXPR
15641 && TREE_CODE (op1) == NOP_EXPR)
15643 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15644 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15645 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15646 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15648 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15649 TYPE_PRECISION (inner2)) + 1;
15650 return prec < TYPE_PRECISION (type);
15653 break;
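/* Editorial illustration (not part of the original sources): adding
   two zero-extended 8-bit values in a 32-bit int needs at most 9 bits
   (255 + 255 == 510), so the sum is provably non-negative. */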
15655 case MULT_EXPR:
15656 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15658 /* x * x is always non-negative for floating point x,
15659 or for integers when signed overflow is undefined. */
15660 if (operand_equal_p (op0, op1, 0)
15661 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15662 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15664 if (TYPE_OVERFLOW_UNDEFINED (type))
15665 *strict_overflow_p = true;
15666 return true;
15670 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15671 both unsigned and their combined precision is less than that of the result. */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));
    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_CABS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP10):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_POW10):
      CASE_INT_FN (BUILT_IN_FFS):
      CASE_INT_FN (BUILT_IN_PARITY):
      CASE_INT_FN (BUILT_IN_POPCOUNT):
      CASE_INT_FN (BUILT_IN_CLZ):
      CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

      CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
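        /* Apart from the sign of zero, sqrt never yields a negative
           value (negative arguments produce a NaN), so when -0.0 need
           not be honored the result is always non-negative; otherwise
           require a non-negative argument.  */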
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_ICEIL):
      CASE_FLT_FN (BUILT_IN_IFLOOR):
      CASE_FLT_FN (BUILT_IN_IRINT):
      CASE_FLT_FN (BUILT_IN_IROUND):
      CASE_FLT_FN (BUILT_IN_LCEIL):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LFLOOR):
      CASE_FLT_FN (BUILT_IN_LLCEIL):
      CASE_FLT_FN (BUILT_IN_LLFLOOR):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALB):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIGNBIT):
      CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                || tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

      CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

      CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
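        /* For example, powi (x, 2) is x * x, which is never negative
           whatever the sign of x.  */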
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
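        /* For example, pow (x, 2.0) behaves like x * x, whereas
           pow (x, 2.5) is a NaN for negative x; only an exponent that
           is exactly an even integer guarantees a non-negative
           result.  */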
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
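        /* For example, for TARGET_EXPR <D.1, { ...; D.1 = x + 1; }> we
           walk to the final statement and test the right-hand side
           x + 1.  */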
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0, arg1, strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */
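/* For example, deciding that x * x is non-negative for signed x relies
   on the multiplication not overflowing, so a -Wstrict-overflow
   warning may be emitted via fold_overflow_warning.  */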
bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0, &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1, &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
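          /* For example, with 8-bit operands the two values lie in
             [0, 0x7f], so the sum is at most 0xfe and cannot wrap
             around to zero.  */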
          return (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1, strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1, strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0, strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
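        /* For example, the address of  extern int x __attribute__ ((weak));
           compares equal to NULL when no definition of x is linked in,
           so &x cannot be assumed nonzero.  */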
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */
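/* For example, both "abc"[1] and *("abc" + 1) fold to the character
   constant 'b'.  */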
tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
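  /* For example, 2 > 3 is computed as 3 < 2, and 2 >= 3 as
     !(2 < 3).  */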
  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either has none, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
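          /* For example, reading a 32-bit float at byte offset 4 of a
             v4sf becomes BIT_FIELD_REF <vectorfoo, 32, 32>.  */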
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */
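/* For example, for  (void) (x + f ())  the addition can be dropped and
   only the call  f ()  kept.  */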
tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
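/* For example, rounding 10 up to a multiple of 8 yields 16; for a
   power-of-two divisor this reduces to (value + 7) & -8 below.  */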
tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val, -1,
                                        overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
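  /* For example, rounding 10 down to a multiple of 8 computes
     10 & -8, which is 8.  */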
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */
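/* For example, for  &s.f  where field f sits at byte offset 4, this
   returns  &s  with *PBITPOS set to 32 and *POFFSET to NULL_TREE.  */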
static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
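/* For example, for  char a[10]  the addresses &a[3] and &a[1] differ
   by the constant 2.  */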
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */
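/* For example, when the caller only needs fabs of the result,
   -x * y can be simplified to x * y and copysign (x, y) to x.  */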
tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
                                arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}