gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
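/* Illustrative usage sketch (hypothetical variable names):

     tree four = size_int (4);
     tree eight = size_int (8);
     tree twelve = size_binop (PLUS_EXPR, four, eight);

   size_binop checks that both operands are compatible integer types
   (see int_binop_types_match_p below) and folds the operation at
   compile time when both operands are constants.  */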
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "tree-flow.h"
63 /* Nonzero if we are folding constants inside an initializer; zero
64 otherwise. */
65 int folding_initializer = 0;
67 /* The following constants represent a bit-based encoding of GCC's
68 comparison operators. This encoding simplifies transformations
69 on relational comparison operators, such as AND and OR. */
70 enum comparison_code {
71 COMPCODE_FALSE = 0,
72 COMPCODE_LT = 1,
73 COMPCODE_EQ = 2,
74 COMPCODE_LE = 3,
75 COMPCODE_GT = 4,
76 COMPCODE_LTGT = 5,
77 COMPCODE_GE = 6,
78 COMPCODE_ORD = 7,
79 COMPCODE_UNORD = 8,
80 COMPCODE_UNLT = 9,
81 COMPCODE_UNEQ = 10,
82 COMPCODE_UNLE = 11,
83 COMPCODE_UNGT = 12,
84 COMPCODE_NE = 13,
85 COMPCODE_UNGE = 14,
86 COMPCODE_TRUE = 15
87 };
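/* Illustrative view of the encoding: each code is a 4-bit mask built
   from LT = 1, EQ = 2, GT = 4 and UNORD = 8, so combining two
   comparisons of the same operands becomes a bitwise operation on
   their codes, e.g.:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)               3 == 1|2
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
     (a < b) || (a == b)  -->  code 1|2 == COMPCODE_LE, i.e. a <= b  */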
89 static bool negate_mathfn_p (enum built_in_function);
90 static bool negate_expr_p (tree);
91 static tree negate_expr (tree);
92 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
93 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
94 static tree const_binop (enum tree_code, tree, tree);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
101 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (location_t, tree, tree,
103 HOST_WIDE_INT, HOST_WIDE_INT, int);
104 static tree optimize_bit_field_compare (location_t, enum tree_code,
105 tree, tree, tree);
106 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
107 HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (const_tree, int);
111 static tree sign_bit_p (tree, const_tree);
112 static int simple_operand_p (const_tree);
113 static bool simple_operand_p_2 (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree optimize_minmax_comparison (location_t, enum tree_code,
121 tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
124 static tree fold_binary_op_with_conditional_arg (location_t,
125 enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static tree fold_mathfn_compare (location_t,
129 enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (const_tree, const_tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
139 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
140 Otherwise, return LOC. */
142 static location_t
143 expr_location_or (tree t, location_t loc)
145 location_t tloc = EXPR_LOCATION (t);
146 return tloc == UNKNOWN_LOCATION ? loc : tloc;
149 /* Similar to protected_set_expr_location, but never modify X in place;
150 if the location can and needs to be set, unshare X. */
152 static inline tree
153 protected_set_expr_location_unshare (tree x, location_t loc)
155 if (CAN_HAVE_LOCATION_P (x)
156 && EXPR_LOCATION (x) != loc
157 && !(TREE_CODE (x) == SAVE_EXPR
158 || TREE_CODE (x) == TARGET_EXPR
159 || TREE_CODE (x) == BIND_EXPR))
161 x = copy_node (x);
162 SET_EXPR_LOCATION (x, loc);
164 return x;
167 /* If ARG2 divides ARG1 with zero remainder, carries out the division
168 of type CODE and returns the quotient.
169 Otherwise returns NULL_TREE. */
171 tree
172 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
174 double_int quo, rem;
175 int uns;
177 /* The sign of the division is taken from operand two, which
178 does the correct thing for POINTER_PLUS_EXPR, where we want
179 a signed division. */
180 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
182 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
183 uns, code, &rem);
185 if (rem.is_zero ())
186 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
188 return NULL_TREE;
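/* Illustrative calls (hypothetical constants):

     div_if_zero_remainder (EXACT_DIV_EXPR, size_int (12), size_int (4))
       returns an INTEGER_CST of 3;
     div_if_zero_remainder (EXACT_DIV_EXPR, size_int (13), size_int (4))
       returns NULL_TREE, since 13 % 4 != 0.  */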
191 /* This is nonzero if we should defer warnings about undefined
192 overflow. This facility exists because these warnings are a
193 special case. The code to estimate loop iterations does not want
194 to issue any warnings, since it works with expressions which do not
195 occur in user code. Various bits of cleanup code call fold(), but
196 only use the result if it has certain characteristics (e.g., is a
197 constant); that code only wants to issue a warning if the result is
198 used. */
200 static int fold_deferring_overflow_warnings;
202 /* If a warning about undefined overflow is deferred, this is the
203 warning. Note that this may cause us to turn two warnings into
204 one, but that is fine since it is sufficient to only give one
205 warning per expression. */
207 static const char* fold_deferred_overflow_warning;
209 /* If a warning about undefined overflow is deferred, this is the
210 level at which the warning should be emitted. */
212 static enum warn_strict_overflow_code fold_deferred_overflow_code;
214 /* Start deferring overflow warnings. We could use a stack here to
215 permit nested calls, but at present it is not necessary. */
217 void
218 fold_defer_overflow_warnings (void)
220 ++fold_deferring_overflow_warnings;
223 /* Stop deferring overflow warnings. If there is a pending warning,
224 and ISSUE is true, then issue the warning if appropriate. STMT is
225 the statement with which the warning should be associated (used for
226 location information); STMT may be NULL. CODE is the level of the
227 warning--a warn_strict_overflow_code value. This function will use
228 the smaller of CODE and the deferred code when deciding whether to
229 issue the warning. CODE may be zero to mean to always use the
230 deferred code. */
232 void
233 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
235 const char *warnmsg;
236 location_t locus;
238 gcc_assert (fold_deferring_overflow_warnings > 0);
239 --fold_deferring_overflow_warnings;
240 if (fold_deferring_overflow_warnings > 0)
242 if (fold_deferred_overflow_warning != NULL
243 && code != 0
244 && code < (int) fold_deferred_overflow_code)
245 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
246 return;
249 warnmsg = fold_deferred_overflow_warning;
250 fold_deferred_overflow_warning = NULL;
252 if (!issue || warnmsg == NULL)
253 return;
255 if (gimple_no_warning_p (stmt))
256 return;
258 /* Use the smallest code level when deciding to issue the
259 warning. */
260 if (code == 0 || code > (int) fold_deferred_overflow_code)
261 code = fold_deferred_overflow_code;
263 if (!issue_strict_overflow_warning (code))
264 return;
266 if (stmt == NULL)
267 locus = input_location;
268 else
269 locus = gimple_location (stmt);
270 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
273 /* Stop deferring overflow warnings, ignoring any deferred
274 warnings. */
276 void
277 fold_undefer_and_ignore_overflow_warnings (void)
279 fold_undefer_overflow_warnings (false, NULL, 0);
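/* Minimal sketch of the deferral protocol, assuming a hypothetical
   caller that only wants a warning if the folded result is used:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res != NULL_TREE && result_will_be_used_p ();
     fold_undefer_overflow_warnings (used, stmt, 0);

   Here result_will_be_used_p is a stand-in for whatever criterion the
   caller applies; passing CODE == 0 keeps the deferred severity.  */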
282 /* Whether we are deferring overflow warnings. */
284 bool
285 fold_deferring_overflow_warnings_p (void)
287 return fold_deferring_overflow_warnings > 0;
290 /* This is called when we fold something based on the fact that signed
291 overflow is undefined. */
293 static void
294 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
296 if (fold_deferring_overflow_warnings > 0)
298 if (fold_deferred_overflow_warning == NULL
299 || wc < fold_deferred_overflow_code)
301 fold_deferred_overflow_warning = gmsgid;
302 fold_deferred_overflow_code = wc;
305 else if (issue_strict_overflow_warning (wc))
306 warning (OPT_Wstrict_overflow, gmsgid);
309 /* Return true if the built-in mathematical function specified by CODE
310 is odd, i.e. -f(x) == f(-x). */
312 static bool
313 negate_mathfn_p (enum built_in_function code)
315 switch (code)
317 CASE_FLT_FN (BUILT_IN_ASIN):
318 CASE_FLT_FN (BUILT_IN_ASINH):
319 CASE_FLT_FN (BUILT_IN_ATAN):
320 CASE_FLT_FN (BUILT_IN_ATANH):
321 CASE_FLT_FN (BUILT_IN_CASIN):
322 CASE_FLT_FN (BUILT_IN_CASINH):
323 CASE_FLT_FN (BUILT_IN_CATAN):
324 CASE_FLT_FN (BUILT_IN_CATANH):
325 CASE_FLT_FN (BUILT_IN_CBRT):
326 CASE_FLT_FN (BUILT_IN_CPROJ):
327 CASE_FLT_FN (BUILT_IN_CSIN):
328 CASE_FLT_FN (BUILT_IN_CSINH):
329 CASE_FLT_FN (BUILT_IN_CTAN):
330 CASE_FLT_FN (BUILT_IN_CTANH):
331 CASE_FLT_FN (BUILT_IN_ERF):
332 CASE_FLT_FN (BUILT_IN_LLROUND):
333 CASE_FLT_FN (BUILT_IN_LROUND):
334 CASE_FLT_FN (BUILT_IN_ROUND):
335 CASE_FLT_FN (BUILT_IN_SIN):
336 CASE_FLT_FN (BUILT_IN_SINH):
337 CASE_FLT_FN (BUILT_IN_TAN):
338 CASE_FLT_FN (BUILT_IN_TANH):
339 CASE_FLT_FN (BUILT_IN_TRUNC):
340 return true;
342 CASE_FLT_FN (BUILT_IN_LLRINT):
343 CASE_FLT_FN (BUILT_IN_LRINT):
344 CASE_FLT_FN (BUILT_IN_NEARBYINT):
345 CASE_FLT_FN (BUILT_IN_RINT):
346 return !flag_rounding_math;
348 default:
349 break;
351 return false;
354 /* Check whether we may negate an integer constant T without causing
355 overflow. */
357 bool
358 may_negate_without_overflow_p (const_tree t)
360 unsigned HOST_WIDE_INT val;
361 unsigned int prec;
362 tree type;
364 gcc_assert (TREE_CODE (t) == INTEGER_CST);
366 type = TREE_TYPE (t);
367 if (TYPE_UNSIGNED (type))
368 return false;
370 prec = TYPE_PRECISION (type);
371 if (prec > HOST_BITS_PER_WIDE_INT)
373 if (TREE_INT_CST_LOW (t) != 0)
374 return true;
375 prec -= HOST_BITS_PER_WIDE_INT;
376 val = TREE_INT_CST_HIGH (t);
378 else
379 val = TREE_INT_CST_LOW (t);
380 if (prec < HOST_BITS_PER_WIDE_INT)
381 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
382 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
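/* Worked example: for a 32-bit signed type, PREC is 32 and the check
   above rejects exactly the value 0x80000000 (INT_MIN), the one value
   whose negation is not representable, since in two's complement
   -INT_MIN wraps back to INT_MIN.  */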
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
388 static bool
389 negate_expr_p (tree t)
391 tree type;
393 if (t == 0)
394 return false;
396 type = TREE_TYPE (t);
398 STRIP_SIGN_NOPS (t);
399 switch (TREE_CODE (t))
401 case INTEGER_CST:
402 if (TYPE_OVERFLOW_WRAPS (type))
403 return true;
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
407 case BIT_NOT_EXPR:
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
411 case FIXED_CST:
412 case NEGATE_EXPR:
413 return true;
415 case REAL_CST:
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
420 case COMPLEX_CST:
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
433 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
434 return false;
435 /* -(A + B) -> (-B) - A. */
436 if (negate_expr_p (TREE_OPERAND (t, 1))
437 && reorder_operands_p (TREE_OPERAND (t, 0),
438 TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
446 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
447 && reorder_operands_p (TREE_OPERAND (t, 0),
448 TREE_OPERAND (t, 1));
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (TREE_TYPE (t)))
452 break;
454 /* Fall through. */
456 case RDIV_EXPR:
457 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
458 return negate_expr_p (TREE_OPERAND (t, 1))
459 || negate_expr_p (TREE_OPERAND (t, 0));
460 break;
462 case TRUNC_DIV_EXPR:
463 case ROUND_DIV_EXPR:
464 case EXACT_DIV_EXPR:
465 /* In general we can't negate A / B, because if A is INT_MIN and
466 B is 1, we may turn this into INT_MIN / -1 which is undefined
467 and actually traps on some architectures. But if overflow is
468 undefined, we can negate, because - (INT_MIN / 1) is an
469 overflow. */
470 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
472 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
473 break;
474 /* If overflow is undefined then we have to be careful because
475 we ask whether it's ok to associate the negate with the
476 division which is not ok for example for
477 -((a - b) / c) where (-(a - b)) / c may invoke undefined
478 overflow because of negating INT_MIN. So do not use
479 negate_expr_p here but open-code the two important cases. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
481 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
482 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
483 return true;
485 else if (negate_expr_p (TREE_OPERAND (t, 0)))
486 return true;
487 return negate_expr_p (TREE_OPERAND (t, 1));
489 case NOP_EXPR:
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
493 tree tem = strip_float_extensions (t);
494 if (tem != t)
495 return negate_expr_p (tem);
497 break;
499 case CALL_EXPR:
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
503 break;
505 case RSHIFT_EXPR:
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
509 tree op1 = TREE_OPERAND (t, 1);
510 if (TREE_INT_CST_HIGH (op1) == 0
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 == TREE_INT_CST_LOW (op1))
513 return true;
515 break;
517 default:
518 break;
520 return false;
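/* Illustrative consequence of the rules above (hypothetical operands):
   for T = a + 5 in a signed type with undefined overflow,
   negate_expr_p returns true through the PLUS_EXPR case because the
   literal 5 is negatable, and fold_negate_expr below then rewrites
   -(a + 5) as (-5) - a.  */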
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
526 returned. */
528 static tree
529 fold_negate_expr (location_t loc, tree t)
531 tree type = TREE_TYPE (t);
532 tree tem;
534 switch (TREE_CODE (t))
536 /* Convert - (~A) to A + 1 (in two's complement, ~A == -A - 1). */
537 case BIT_NOT_EXPR:
538 if (INTEGRAL_TYPE_P (type))
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 build_int_cst (type, 1));
541 break;
543 case INTEGER_CST:
544 tem = fold_negate_const (t, type);
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 || !TYPE_OVERFLOW_TRAPS (type))
547 return tem;
548 break;
550 case REAL_CST:
551 tem = fold_negate_const (t, type);
552 /* Two's complement FP formats, such as c4x, may overflow. */
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
554 return tem;
555 break;
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
561 case COMPLEX_CST:
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
572 break;
574 case COMPLEX_EXPR:
575 if (negate_expr_p (t))
576 return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
579 break;
581 case CONJ_EXPR:
582 if (negate_expr_p (t))
583 return fold_build1_loc (loc, CONJ_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
585 break;
587 case NEGATE_EXPR:
588 return TREE_OPERAND (t, 0);
590 case PLUS_EXPR:
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
594 /* -(A + B) -> (-B) - A. */
595 if (negate_expr_p (TREE_OPERAND (t, 1))
596 && reorder_operands_p (TREE_OPERAND (t, 0),
597 TREE_OPERAND (t, 1)))
599 tem = negate_expr (TREE_OPERAND (t, 1));
600 return fold_build2_loc (loc, MINUS_EXPR, type,
601 tem, TREE_OPERAND (t, 0));
604 /* -(A + B) -> (-A) - B. */
605 if (negate_expr_p (TREE_OPERAND (t, 0)))
607 tem = negate_expr (TREE_OPERAND (t, 0));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 1));
612 break;
614 case MINUS_EXPR:
615 /* - (A - B) -> B - A */
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
619 return fold_build2_loc (loc, MINUS_EXPR, type,
620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
621 break;
623 case MULT_EXPR:
624 if (TYPE_UNSIGNED (type))
625 break;
627 /* Fall through. */
629 case RDIV_EXPR:
630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
632 tem = TREE_OPERAND (t, 1);
633 if (negate_expr_p (tem))
634 return fold_build2_loc (loc, TREE_CODE (t), type,
635 TREE_OPERAND (t, 0), negate_expr (tem));
636 tem = TREE_OPERAND (t, 0);
637 if (negate_expr_p (tem))
638 return fold_build2_loc (loc, TREE_CODE (t), type,
639 negate_expr (tem), TREE_OPERAND (t, 1));
641 break;
643 case TRUNC_DIV_EXPR:
644 case ROUND_DIV_EXPR:
645 case EXACT_DIV_EXPR:
646 /* In general we can't negate A / B, because if A is INT_MIN and
647 B is 1, we may turn this into INT_MIN / -1 which is undefined
648 and actually traps on some architectures. But if overflow is
649 undefined, we can negate, because - (INT_MIN / 1) is an
650 overflow. */
651 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
653 const char * const warnmsg = G_("assuming signed overflow does not "
654 "occur when negating a division");
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
658 if (INTEGRAL_TYPE_P (type)
659 && (TREE_CODE (tem) != INTEGER_CST
660 || integer_onep (tem)))
661 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
662 return fold_build2_loc (loc, TREE_CODE (t), type,
663 TREE_OPERAND (t, 0), negate_expr (tem));
665 /* If overflow is undefined then we have to be careful because
666 we ask whether it's ok to associate the negate with the
667 division which is not ok for example for
668 -((a - b) / c) where (-(a - b)) / c may invoke undefined
669 overflow because of negating INT_MIN. So do not use
670 negate_expr_p here but open-code the two important cases. */
671 tem = TREE_OPERAND (t, 0);
672 if ((INTEGRAL_TYPE_P (type)
673 && (TREE_CODE (tem) == NEGATE_EXPR
674 || (TREE_CODE (tem) == INTEGER_CST
675 && may_negate_without_overflow_p (tem))))
676 || !INTEGRAL_TYPE_P (type))
677 return fold_build2_loc (loc, TREE_CODE (t), type,
678 negate_expr (tem), TREE_OPERAND (t, 1));
680 break;
682 case NOP_EXPR:
683 /* Convert -((double)float) into (double)(-float). */
684 if (TREE_CODE (type) == REAL_TYPE)
686 tem = strip_float_extensions (t);
687 if (tem != t && negate_expr_p (tem))
688 return fold_convert_loc (loc, type, negate_expr (tem));
690 break;
692 case CALL_EXPR:
693 /* Negate -f(x) as f(-x). */
694 if (negate_mathfn_p (builtin_mathfn_code (t))
695 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 tree fndecl, arg;
699 fndecl = get_callee_fndecl (t);
700 arg = negate_expr (CALL_EXPR_ARG (t, 0));
701 return build_call_expr_loc (loc, fndecl, 1, arg);
703 break;
705 case RSHIFT_EXPR:
706 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
707 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
709 tree op1 = TREE_OPERAND (t, 1);
710 if (TREE_INT_CST_HIGH (op1) == 0
711 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
712 == TREE_INT_CST_LOW (op1))
714 tree ntype = TYPE_UNSIGNED (type)
715 ? signed_type_for (type)
716 : unsigned_type_for (type);
717 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
718 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
719 return fold_convert_loc (loc, type, temp);
722 break;
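/* Reasoning sketch for the transformation above: when the shift count
   is precision - 1, the signed shift (int) x >> 31 yields 0 or -1
   depending on the sign bit; negating that gives 0 or 1, exactly the
   value of the unsigned shift (unsigned) x >> 31.  */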
724 default:
725 break;
728 return NULL_TREE;
731 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
732 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
733 return NULL_TREE. */
735 static tree
736 negate_expr (tree t)
738 tree type, tem;
739 location_t loc;
741 if (t == NULL_TREE)
742 return NULL_TREE;
744 loc = EXPR_LOCATION (t);
745 type = TREE_TYPE (t);
746 STRIP_SIGN_NOPS (t);
748 tem = fold_negate_expr (loc, t);
749 if (!tem)
750 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
751 return fold_convert_loc (loc, type, tem);
754 /* Split a tree IN into constant, literal and variable parts that could be
755 combined with CODE to make IN. "constant" means an expression with
756 TREE_CONSTANT but that isn't an actual constant. CODE must be a
757 commutative arithmetic operation. Store the constant part into *CONP,
758 the literal in *LITP and return the variable part. If a part isn't
759 present, set it to null. If the tree does not decompose in this way,
760 return the entire tree as the variable part and the other parts as null.
762 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
763 case, we negate an operand that was subtracted, except for a
764 literal, for which we use *MINUS_LITP instead.
766 If NEGATE_P is true, we are negating all of IN, again except a literal
767 for which we use *MINUS_LITP instead.
769 If IN is itself a literal or constant, return it as appropriate.
771 Note that we do not guarantee that any of the three values will be the
772 same type as IN, but they will have the same signedness and mode. */
774 static tree
775 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
776 tree *minus_litp, int negate_p)
778 tree var = 0;
780 *conp = 0;
781 *litp = 0;
782 *minus_litp = 0;
784 /* Strip any conversions that don't change the machine mode or signedness. */
785 STRIP_SIGN_NOPS (in);
787 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
788 || TREE_CODE (in) == FIXED_CST)
789 *litp = in;
790 else if (TREE_CODE (in) == code
791 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
792 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
793 /* We can associate addition and subtraction together (even
794 though the C standard doesn't say so) for integers because
795 the value is not affected. For reals, the value might be
796 affected, so we can't. */
797 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
798 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
800 tree op0 = TREE_OPERAND (in, 0);
801 tree op1 = TREE_OPERAND (in, 1);
802 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
803 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
805 /* First see if either of the operands is a literal, then a constant. */
806 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
807 || TREE_CODE (op0) == FIXED_CST)
808 *litp = op0, op0 = 0;
809 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
810 || TREE_CODE (op1) == FIXED_CST)
811 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
813 if (op0 != 0 && TREE_CONSTANT (op0))
814 *conp = op0, op0 = 0;
815 else if (op1 != 0 && TREE_CONSTANT (op1))
816 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
818 /* If we haven't dealt with either operand, this is not a case we can
819 decompose. Otherwise, VAR is either of the ones remaining, if any. */
820 if (op0 != 0 && op1 != 0)
821 var = in;
822 else if (op0 != 0)
823 var = op0;
824 else
825 var = op1, neg_var_p = neg1_p;
827 /* Now do any needed negations. */
828 if (neg_litp_p)
829 *minus_litp = *litp, *litp = 0;
830 if (neg_conp_p)
831 *conp = negate_expr (*conp);
832 if (neg_var_p)
833 var = negate_expr (var);
835 else if (TREE_CODE (in) == BIT_NOT_EXPR
836 && code == PLUS_EXPR)
838 /* -X - 1 is folded to ~X, undo that here. */
839 *minus_litp = build_one_cst (TREE_TYPE (in));
840 var = negate_expr (TREE_OPERAND (in, 0));
842 else if (TREE_CONSTANT (in))
843 *conp = in;
844 else
845 var = in;
847 if (negate_p)
849 if (*litp)
850 *minus_litp = *litp, *litp = 0;
851 else if (*minus_litp)
852 *litp = *minus_litp, *minus_litp = 0;
853 *conp = negate_expr (*conp);
854 var = negate_expr (var);
857 return var;
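/* Illustrative decompositions (hypothetical operands), both with
   CODE == PLUS_EXPR:

     IN = x + 4:  returns x, *LITP = 4, *CONP = *MINUS_LITP = NULL.
     IN = x - 4:  the subtracted literal is negated, so the call
                  returns x with *MINUS_LITP = 4 and *LITP = NULL.  */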
860 /* Re-associate trees split by the above function. T1 and T2 are
861 either expressions to associate or null. Return the new
862 expression, if any. LOC is the location of the new expression. If
863 we build an operation, do it in TYPE and with CODE. */
865 static tree
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
868 if (t1 == 0)
869 return t2;
870 else if (t2 == 0)
871 return t1;
873 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874 try to fold this since we will have infinite recursion. But do
875 deal with any NEGATE_EXPRs. */
876 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 if (code == PLUS_EXPR)
881 if (TREE_CODE (t1) == NEGATE_EXPR)
882 return build2_loc (loc, MINUS_EXPR, type,
883 fold_convert_loc (loc, type, t2),
884 fold_convert_loc (loc, type,
885 TREE_OPERAND (t1, 0)));
886 else if (TREE_CODE (t2) == NEGATE_EXPR)
887 return build2_loc (loc, MINUS_EXPR, type,
888 fold_convert_loc (loc, type, t1),
889 fold_convert_loc (loc, type,
890 TREE_OPERAND (t2, 0)));
891 else if (integer_zerop (t2))
892 return fold_convert_loc (loc, type, t1);
894 else if (code == MINUS_EXPR)
896 if (integer_zerop (t2))
897 return fold_convert_loc (loc, type, t1);
900 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type, t2));
904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type, t2));
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909 for use in int_const_binop, size_binop and size_diffop. */
911 static bool
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
915 return false;
916 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
917 return false;
919 switch (code)
921 case LSHIFT_EXPR:
922 case RSHIFT_EXPR:
923 case LROTATE_EXPR:
924 case RROTATE_EXPR:
925 return true;
927 default:
928 break;
931 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938 to produce a new constant. Return NULL_TREE if we don't know how
939 to evaluate CODE at compile-time. */
941 static tree
942 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
943 int overflowable)
945 double_int op1, op2, res, tmp;
946 tree t;
947 tree type = TREE_TYPE (arg1);
948 bool uns = TYPE_UNSIGNED (type);
949 bool overflow = false;
951 op1 = tree_to_double_int (arg1);
952 op2 = tree_to_double_int (arg2);
954 switch (code)
956 case BIT_IOR_EXPR:
957 res = op1 | op2;
958 break;
960 case BIT_XOR_EXPR:
961 res = op1 ^ op2;
962 break;
964 case BIT_AND_EXPR:
965 res = op1 & op2;
966 break;
968 case RSHIFT_EXPR:
969 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
970 break;
972 case LSHIFT_EXPR:
973 /* It's unclear from the C standard whether shifts can overflow.
974 The following code ignores overflow; perhaps a C standard
975 interpretation ruling is needed. */
976 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
977 break;
979 case RROTATE_EXPR:
980 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
981 break;
983 case LROTATE_EXPR:
984 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
985 break;
987 case PLUS_EXPR:
988 res = op1.add_with_sign (op2, false, &overflow);
989 break;
991 case MINUS_EXPR:
992 res = op1.sub_with_overflow (op2, &overflow);
993 break;
995 case MULT_EXPR:
996 res = op1.mul_with_sign (op2, false, &overflow);
997 break;
999 case MULT_HIGHPART_EXPR:
1000 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1001 return NULL_TREE;
1002 else
1004 bool dummy_overflow;
1005 /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
1006 is performed in twice the precision of arguments. */
1007 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1008 res = tmp.rshift (TYPE_PRECISION (type),
1009 2 * TYPE_PRECISION (type), !uns);
1011 break;
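/* Worked example: for a 16-bit unsigned type, op1 = 0x1234 and
   op2 = 0x5678 multiply to the 32-bit product 0x06260060; shifting
   right by TYPE_PRECISION (16 bits) leaves the high part, 0x0626.  */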
1013 case TRUNC_DIV_EXPR:
1014 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1015 case EXACT_DIV_EXPR:
1016 /* This is a shortcut for a common special case. */
1017 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1018 && !TREE_OVERFLOW (arg1)
1019 && !TREE_OVERFLOW (arg2)
1020 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1022 if (code == CEIL_DIV_EXPR)
1023 op1.low += op2.low - 1;
1025 res.low = op1.low / op2.low, res.high = 0;
1026 break;
1029 /* ... fall through ... */
1031 case ROUND_DIV_EXPR:
1032 if (op2.is_zero ())
1033 return NULL_TREE;
1034 if (op2.is_one ())
1036 res = op1;
1037 break;
1039 if (op1 == op2 && !op1.is_zero ())
1041 res = double_int_one;
1042 break;
1044 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1045 break;
1047 case TRUNC_MOD_EXPR:
1048 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1049 /* This is a shortcut for a common special case. */
1050 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1051 && !TREE_OVERFLOW (arg1)
1052 && !TREE_OVERFLOW (arg2)
1053 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1055 if (code == CEIL_MOD_EXPR)
1056 op1.low += op2.low - 1;
1057 res.low = op1.low % op2.low, res.high = 0;
1058 break;
1061 /* ... fall through ... */
1063 case ROUND_MOD_EXPR:
1064 if (op2.is_zero ())
1065 return NULL_TREE;
1066 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1067 break;
1069 case MIN_EXPR:
1070 res = op1.min (op2, uns);
1071 break;
1073 case MAX_EXPR:
1074 res = op1.max (op2, uns);
1075 break;
1077 default:
1078 return NULL_TREE;
1081 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1082 (!uns && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1085 return t;
1088 tree
1089 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1091 return int_const_binop_1 (code, arg1, arg2, 1);
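/* Illustrative call (hypothetical constants):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   The OVERFLOWABLE argument controls which overflows get recorded:
   int_const_binop passes 1, while size_binop_loc below passes -1 so
   that overflow is tracked even for unsigned sizetype math.  */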
1094 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1095 constant. We assume ARG1 and ARG2 have the same data type, or at least
1096 are the same kind of constant and the same machine mode. Return zero if
1097 combining the constants is not allowed in the current operating mode. */
1099 static tree
1100 const_binop (enum tree_code code, tree arg1, tree arg2)
1102 /* Sanity check for the recursive cases. */
1103 if (!arg1 || !arg2)
1104 return NULL_TREE;
1106 STRIP_NOPS (arg1);
1107 STRIP_NOPS (arg2);
1109 if (TREE_CODE (arg1) == INTEGER_CST)
1110 return int_const_binop (code, arg1, arg2);
1112 if (TREE_CODE (arg1) == REAL_CST)
1114 enum machine_mode mode;
1115 REAL_VALUE_TYPE d1;
1116 REAL_VALUE_TYPE d2;
1117 REAL_VALUE_TYPE value;
1118 REAL_VALUE_TYPE result;
1119 bool inexact;
1120 tree t, type;
1122 /* The following codes are handled by real_arithmetic. */
1123 switch (code)
1125 case PLUS_EXPR:
1126 case MINUS_EXPR:
1127 case MULT_EXPR:
1128 case RDIV_EXPR:
1129 case MIN_EXPR:
1130 case MAX_EXPR:
1131 break;
1133 default:
1134 return NULL_TREE;
1137 d1 = TREE_REAL_CST (arg1);
1138 d2 = TREE_REAL_CST (arg2);
1140 type = TREE_TYPE (arg1);
1141 mode = TYPE_MODE (type);
1143 /* Don't perform the operation if we honor signaling NaNs and
1144 either operand is a NaN. */
1145 if (HONOR_SNANS (mode)
1146 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1147 return NULL_TREE;
1149 /* Don't perform the operation if it would raise a division
1150 by zero exception. */
1151 if (code == RDIV_EXPR
1152 && REAL_VALUES_EQUAL (d2, dconst0)
1153 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1154 return NULL_TREE;
1156 /* If either operand is a NaN, just return it. Otherwise, set up
1157 for floating-point trap; we return an overflow. */
1158 if (REAL_VALUE_ISNAN (d1))
1159 return arg1;
1160 else if (REAL_VALUE_ISNAN (d2))
1161 return arg2;
1163 inexact = real_arithmetic (&value, code, &d1, &d2);
1164 real_convert (&result, mode, &value);
1166 /* Don't constant fold this floating point operation if
1167 the result has overflowed and flag_trapping_math is set. */
1168 if (flag_trapping_math
1169 && MODE_HAS_INFINITIES (mode)
1170 && REAL_VALUE_ISINF (result)
1171 && !REAL_VALUE_ISINF (d1)
1172 && !REAL_VALUE_ISINF (d2))
1173 return NULL_TREE;
1175 /* Don't constant fold this floating point operation if the
1176 result may depend upon the run-time rounding mode and
1177 flag_rounding_math is set, or if GCC's software emulation
1178 is unable to accurately represent the result. */
1179 if ((flag_rounding_math
1180 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1181 && (inexact || !real_identical (&result, &value)))
1182 return NULL_TREE;
1184 t = build_real (type, result);
1186 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1187 return t;
1190 if (TREE_CODE (arg1) == FIXED_CST)
1192 FIXED_VALUE_TYPE f1;
1193 FIXED_VALUE_TYPE f2;
1194 FIXED_VALUE_TYPE result;
1195 tree t, type;
1196 int sat_p;
1197 bool overflow_p;
1199 /* The following codes are handled by fixed_arithmetic. */
1200 switch (code)
1202 case PLUS_EXPR:
1203 case MINUS_EXPR:
1204 case MULT_EXPR:
1205 case TRUNC_DIV_EXPR:
1206 f2 = TREE_FIXED_CST (arg2);
1207 break;
1209 case LSHIFT_EXPR:
1210 case RSHIFT_EXPR:
1211 f2.data.high = TREE_INT_CST_HIGH (arg2);
1212 f2.data.low = TREE_INT_CST_LOW (arg2);
1213 f2.mode = SImode;
1214 break;
1216 default:
1217 return NULL_TREE;
1220 f1 = TREE_FIXED_CST (arg1);
1221 type = TREE_TYPE (arg1);
1222 sat_p = TYPE_SATURATING (type);
1223 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1224 t = build_fixed (type, result);
1225 /* Propagate overflow flags. */
1226 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1227 TREE_OVERFLOW (t) = 1;
1228 return t;
1231 if (TREE_CODE (arg1) == COMPLEX_CST)
1233 tree type = TREE_TYPE (arg1);
1234 tree r1 = TREE_REALPART (arg1);
1235 tree i1 = TREE_IMAGPART (arg1);
1236 tree r2 = TREE_REALPART (arg2);
1237 tree i2 = TREE_IMAGPART (arg2);
1238 tree real, imag;
1240 switch (code)
1242 case PLUS_EXPR:
1243 case MINUS_EXPR:
1244 real = const_binop (code, r1, r2);
1245 imag = const_binop (code, i1, i2);
1246 break;
1248 case MULT_EXPR:
1249 if (COMPLEX_FLOAT_TYPE_P (type))
1250 return do_mpc_arg2 (arg1, arg2, type,
1251 /* do_nonfinite= */ folding_initializer,
1252 mpc_mul);
1254 real = const_binop (MINUS_EXPR,
1255 const_binop (MULT_EXPR, r1, r2),
1256 const_binop (MULT_EXPR, i1, i2));
1257 imag = const_binop (PLUS_EXPR,
1258 const_binop (MULT_EXPR, r1, i2),
1259 const_binop (MULT_EXPR, i1, r2));
1260 break;
1262 case RDIV_EXPR:
1263 if (COMPLEX_FLOAT_TYPE_P (type))
1264 return do_mpc_arg2 (arg1, arg2, type,
1265 /* do_nonfinite= */ folding_initializer,
1266 mpc_div);
1267 /* Fallthru ... */
1268 case TRUNC_DIV_EXPR:
1269 case CEIL_DIV_EXPR:
1270 case FLOOR_DIV_EXPR:
1271 case ROUND_DIV_EXPR:
1272 if (flag_complex_method == 0)
1274 /* Keep this algorithm in sync with
1275 tree-complex.c:expand_complex_div_straight().
1277 Expand complex division to scalars, straightforward algorithm.
1278 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1279 t = br*br + bi*bi
1281 tree magsquared
1282 = const_binop (PLUS_EXPR,
1283 const_binop (MULT_EXPR, r2, r2),
1284 const_binop (MULT_EXPR, i2, i2));
1285 tree t1
1286 = const_binop (PLUS_EXPR,
1287 const_binop (MULT_EXPR, r1, r2),
1288 const_binop (MULT_EXPR, i1, i2));
1289 tree t2
1290 = const_binop (MINUS_EXPR,
1291 const_binop (MULT_EXPR, i1, r2),
1292 const_binop (MULT_EXPR, r1, i2));
1294 real = const_binop (code, t1, magsquared);
1295 imag = const_binop (code, t2, magsquared);
1297 else
1299 /* Keep this algorithm in sync with
1300 tree-complex.c:expand_complex_div_wide().
1302 Expand complex division to scalars, modified algorithm to minimize
1303 overflow with wide input ranges. */
1304 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1305 fold_abs_const (r2, TREE_TYPE (type)),
1306 fold_abs_const (i2, TREE_TYPE (type)));
1308 if (integer_nonzerop (compare))
1310 /* In the TRUE branch, we compute
1311 ratio = br/bi;
1312 div = (br * ratio) + bi;
1313 tr = (ar * ratio) + ai;
1314 ti = (ai * ratio) - ar;
1315 tr = tr / div;
1316 ti = ti / div; */
1317 tree ratio = const_binop (code, r2, i2);
1318 tree div = const_binop (PLUS_EXPR, i2,
1319 const_binop (MULT_EXPR, r2, ratio));
1320 real = const_binop (MULT_EXPR, r1, ratio);
1321 real = const_binop (PLUS_EXPR, real, i1);
1322 real = const_binop (code, real, div);
1324 imag = const_binop (MULT_EXPR, i1, ratio);
1325 imag = const_binop (MINUS_EXPR, imag, r1);
1326 imag = const_binop (code, imag, div);
1328 else
1330 /* In the FALSE branch, we compute
1331 ratio = bi/br;
1332 div = (bi * ratio) + br;
1333 tr = (ai * ratio) + ar;
1334 ti = ai - (ar * ratio);
1335 tr = tr / div;
1336 ti = ti / div; */
1337 tree ratio = const_binop (code, i2, r2);
1338 tree div = const_binop (PLUS_EXPR, r2,
1339 const_binop (MULT_EXPR, i2, ratio));
1341 real = const_binop (MULT_EXPR, i1, ratio);
1342 real = const_binop (PLUS_EXPR, real, r1);
1343 real = const_binop (code, real, div);
1345 imag = const_binop (MULT_EXPR, r1, ratio);
1346 imag = const_binop (MINUS_EXPR, i1, imag);
1347 imag = const_binop (code, imag, div);
1350 break;
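/* Worked example of the straight algorithm above:
   (3 + 2i) / (1 + 1i) gives t = 1*1 + 1*1 = 2,
   tr = (3*1 + 2*1) / 2 = 2.5, ti = (2*1 - 3*1) / 2 = -0.5,
   i.e. 2.5 - 0.5i; checking, (2.5 - 0.5i) * (1 + 1i) == 3 + 2i.  */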
1352 default:
1353 return NULL_TREE;
1356 if (real && imag)
1357 return build_complex (type, real, imag);
1360 if (TREE_CODE (arg1) == VECTOR_CST
1361 && TREE_CODE (arg2) == VECTOR_CST)
1363 tree type = TREE_TYPE (arg1);
1364 int count = TYPE_VECTOR_SUBPARTS (type), i;
1365 tree *elts = XALLOCAVEC (tree, count);
1367 for (i = 0; i < count; i++)
1369 tree elem1 = VECTOR_CST_ELT (arg1, i);
1370 tree elem2 = VECTOR_CST_ELT (arg2, i);
1372 elts[i] = const_binop (code, elem1, elem2);
1374 /* It is possible that const_binop cannot handle the given
1375 code and returns NULL_TREE. */
1376 if (elts[i] == NULL_TREE)
1377 return NULL_TREE;
1380 return build_vector (type, elts);
1383 /* Shifts allow a scalar offset for a vector. */
1384 if (TREE_CODE (arg1) == VECTOR_CST
1385 && TREE_CODE (arg2) == INTEGER_CST)
1387 tree type = TREE_TYPE (arg1);
1388 int count = TYPE_VECTOR_SUBPARTS (type), i;
1389 tree *elts = XALLOCAVEC (tree, count);
1391 if (code == VEC_LSHIFT_EXPR
1392 || code == VEC_RSHIFT_EXPR)
1394 if (!host_integerp (arg2, 1))
1395 return NULL_TREE;
1397 unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
1398 unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
1399 unsigned HOST_WIDE_INT innerc
1400 = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
1401 if (shiftc >= outerc || (shiftc % innerc) != 0)
1402 return NULL_TREE;
1403 int offset = shiftc / innerc;
1404 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1405 For reductions, the compiler always emits VEC_RSHIFT_EXPR;
1406 for !BYTES_BIG_ENDIAN this picks the first vector element, but
1407 for BYTES_BIG_ENDIAN the last element of the vector. */
1408 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1409 offset = -offset;
1410 tree zero = build_zero_cst (TREE_TYPE (type));
1411 for (i = 0; i < count; i++)
1413 if (i + offset < 0 || i + offset >= count)
1414 elts[i] = zero;
1415 else
1416 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1419 else
1420 return NULL_TREE;
1422 return build_vector (type, elts);
1424 return NULL_TREE;
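/* Worked example for the vector-shift case above: with a vector of
   four 32-bit elements, OUTERC is 128 and INNERC is 32, so a shift
   count of 64 bits becomes a lane OFFSET of 2: each result element is
   taken two lanes away, and lanes shifted in from outside the vector
   are filled with zero.  */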
1427 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1428 indicates which particular sizetype to create. */
1430 tree
1431 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1433 return build_int_cst (sizetype_tab[(int) kind], number);
1436 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1437 is a tree code. The type of the result is taken from the operands.
1438 Both must be equivalent integer types, a la int_binop_types_match_p.
1439 If the operands are constant, so is the result. */
1441 tree
1442 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1444 tree type = TREE_TYPE (arg0);
1446 if (arg0 == error_mark_node || arg1 == error_mark_node)
1447 return error_mark_node;
1449 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1450 TREE_TYPE (arg1)));
1452 /* Handle the special case of two integer constants faster. */
1453 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1455 /* And some specific cases even faster than that. */
1456 if (code == PLUS_EXPR)
1458 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1459 return arg1;
1460 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1461 return arg0;
1463 else if (code == MINUS_EXPR)
1465 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1466 return arg0;
1468 else if (code == MULT_EXPR)
1470 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1471 return arg1;
1474 /* Handle general case of two integer constants. For sizetype
1475 constant calculations we always want to know about overflow,
1476 even in the unsigned case. */
1477 return int_const_binop_1 (code, arg0, arg1, -1);
1480 return fold_build2_loc (loc, code, type, arg0, arg1);
1483 /* Given two values, either both of sizetype or both of bitsizetype,
1484 compute the difference between the two values. Return the value
1485 in signed type corresponding to the type of the operands. */
1487 tree
1488 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1490 tree type = TREE_TYPE (arg0);
1491 tree ctype;
1493 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1494 TREE_TYPE (arg1)));
1496 /* If the type is already signed, just do the simple thing. */
1497 if (!TYPE_UNSIGNED (type))
1498 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1500 if (type == sizetype)
1501 ctype = ssizetype;
1502 else if (type == bitsizetype)
1503 ctype = sbitsizetype;
1504 else
1505 ctype = signed_type_for (type);
1507 /* If either operand is not a constant, do the conversions to the signed
1508 type and subtract. The hardware will do the right thing with any
1509 overflow in the subtraction. */
1510 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1511 return size_binop_loc (loc, MINUS_EXPR,
1512 fold_convert_loc (loc, ctype, arg0),
1513 fold_convert_loc (loc, ctype, arg1));
1515 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1516 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1517 overflow) and negate (which can't either). Special-case a result
1518 of zero while we're here. */
1519 if (tree_int_cst_equal (arg0, arg1))
1520 return build_int_cst (ctype, 0);
1521 else if (tree_int_cst_lt (arg1, arg0))
1522 return fold_convert_loc (loc, ctype,
1523 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1524 else
1525 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1526 fold_convert_loc (loc, ctype,
1527 size_binop_loc (loc,
1528 MINUS_EXPR,
1529 arg1, arg0)));
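/* Illustrative call (hypothetical constants):
   size_diffop_loc (loc, size_int (4), size_int (8)) returns the
   ssizetype constant -4, computed as -(8 - 4) via the final branch
   above, whereas a plain unsigned sizetype subtraction would wrap.  */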
1532 /* A subroutine of fold_convert_const handling conversions of an
1533 INTEGER_CST to another integer type. */
1535 static tree
1536 fold_convert_const_int_from_int (tree type, const_tree arg1)
1538 tree t;
1540 /* Given an integer constant, make a new constant with the new type,
1541 appropriately sign-extended or truncated. */
1542 t = force_fit_type_double (type, tree_to_double_int (arg1),
1543 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1544 (TREE_INT_CST_HIGH (arg1) < 0
1545 && (TYPE_UNSIGNED (type)
1546 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1547 | TREE_OVERFLOW (arg1));
1549 return t;
1552 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1553 to an integer type. */
1555 static tree
1556 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1558 int overflow = 0;
1559 tree t;
1561 /* The following code implements the floating point to integer
1562 conversion rules required by the Java Language Specification,
1563 that IEEE NaNs are mapped to zero and values that overflow
1564 the target precision saturate, i.e. values greater than
1565 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1566 are mapped to INT_MIN. These semantics are allowed by the
1567 C and C++ standards that simply state that the behavior of
1568 FP-to-integer conversion is unspecified upon overflow. */
1570 double_int val;
1571 REAL_VALUE_TYPE r;
1572 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1574 switch (code)
1576 case FIX_TRUNC_EXPR:
1577 real_trunc (&r, VOIDmode, &x);
1578 break;
1580 default:
1581 gcc_unreachable ();
1584 /* If R is NaN, return zero and show we have an overflow. */
1585 if (REAL_VALUE_ISNAN (r))
1587 overflow = 1;
1588 val = double_int_zero;
1591 /* See if R is less than the lower bound or greater than the
1592 upper bound. */
1594 if (! overflow)
1596 tree lt = TYPE_MIN_VALUE (type);
1597 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1598 if (REAL_VALUES_LESS (r, l))
1600 overflow = 1;
1601 val = tree_to_double_int (lt);
1605 if (! overflow)
1607 tree ut = TYPE_MAX_VALUE (type);
1608 if (ut)
1610 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1611 if (REAL_VALUES_LESS (u, r))
1613 overflow = 1;
1614 val = tree_to_double_int (ut);
1619 if (! overflow)
1620 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1622 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1623 return t;
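/* Worked examples for a 32-bit signed target type:
     (int) 1.0e10   saturates to INT_MAX, with TREE_OVERFLOW set;
     (int) -1.0e10  saturates to INT_MIN, with TREE_OVERFLOW set;
     (int) NaN      becomes 0, with TREE_OVERFLOW set,
   matching the Java-style saturating semantics described above.  */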
1626 /* A subroutine of fold_convert_const handling conversions of a
1627 FIXED_CST to an integer type. */
1629 static tree
1630 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1632 tree t;
1633 double_int temp, temp_trunc;
1634 unsigned int mode;
1636 /* Right shift FIXED_CST to temp by fbit. */
1637 temp = TREE_FIXED_CST (arg1).data;
1638 mode = TREE_FIXED_CST (arg1).mode;
1639 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1641 temp = temp.rshift (GET_MODE_FBIT (mode),
1642 HOST_BITS_PER_DOUBLE_INT,
1643 SIGNED_FIXED_POINT_MODE_P (mode));
1645 /* Left shift temp to temp_trunc by fbit. */
1646 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1647 HOST_BITS_PER_DOUBLE_INT,
1648 SIGNED_FIXED_POINT_MODE_P (mode));
1650 else
1652 temp = double_int_zero;
1653 temp_trunc = double_int_zero;
1656 /* If FIXED_CST is negative, we need to round the value toward 0;
1657 we do this by adding 1 to TEMP when the fractional bits are not zero. */
1658 if (SIGNED_FIXED_POINT_MODE_P (mode)
1659 && temp_trunc.is_negative ()
1660 && TREE_FIXED_CST (arg1).data != temp_trunc)
1661 temp += double_int_one;
1663 /* Given a fixed-point constant, make a new constant with the new type,
1664 appropriately sign-extended or truncated. */
1665 t = force_fit_type_double (type, temp, -1,
1666 (temp.is_negative ()
1667 && (TYPE_UNSIGNED (type)
1668 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1669 | TREE_OVERFLOW (arg1));
1671 return t;
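/* Worked example: for a signed fixed-point value of -2.5, the
   arithmetic right shift by FBIT rounds toward negative infinity,
   giving TEMP == -3 with nonzero fractional bits; the adjustment
   above adds 1, yielding -2, i.e. truncation toward zero.  */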
1674 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1675 to another floating point type. */
1677 static tree
1678 fold_convert_const_real_from_real (tree type, const_tree arg1)
1680 REAL_VALUE_TYPE value;
1681 tree t;
1683 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1684 t = build_real (type, value);
1686 /* If converting an infinity or NAN to a representation that doesn't
1687 have one, set the overflow bit so that we can produce some kind of
1688 error message at the appropriate point if necessary. It's not the
1689 most user-friendly message, but it's better than nothing. */
1690 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1691 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1692 TREE_OVERFLOW (t) = 1;
1693 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1694 && !MODE_HAS_NANS (TYPE_MODE (type)))
1695 TREE_OVERFLOW (t) = 1;
1696 /* Regular overflow: the conversion produced an infinity in a mode that
1697 can't represent one. */
1698 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1699 && REAL_VALUE_ISINF (value)
1700 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1701 TREE_OVERFLOW (t) = 1;
1702 else
1703 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1704 return t;
1707 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1708 to a floating point type. */
1710 static tree
1711 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1713 REAL_VALUE_TYPE value;
1714 tree t;
1716 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1717 t = build_real (type, value);
1719 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1720 return t;
1723 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1724 to another fixed-point type. */
1726 static tree
1727 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1729 FIXED_VALUE_TYPE value;
1730 tree t;
1731 bool overflow_p;
1733 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1734 TYPE_SATURATING (type));
1735 t = build_fixed (type, value);
1737 /* Propagate overflow flags. */
1738 if (overflow_p | TREE_OVERFLOW (arg1))
1739 TREE_OVERFLOW (t) = 1;
1740 return t;
1743 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1744 to a fixed-point type. */
1746 static tree
1747 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1749 FIXED_VALUE_TYPE value;
1750 tree t;
1751 bool overflow_p;
1753 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1754 TREE_INT_CST (arg1),
1755 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1756 TYPE_SATURATING (type));
1757 t = build_fixed (type, value);
1759 /* Propagate overflow flags. */
1760 if (overflow_p | TREE_OVERFLOW (arg1))
1761 TREE_OVERFLOW (t) = 1;
1762 return t;
1765 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1766 to a fixed-point type. */
1768 static tree
1769 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1771 FIXED_VALUE_TYPE value;
1772 tree t;
1773 bool overflow_p;
1775 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1776 &TREE_REAL_CST (arg1),
1777 TYPE_SATURATING (type));
1778 t = build_fixed (type, value);
1780 /* Propagate overflow flags. */
1781 if (overflow_p | TREE_OVERFLOW (arg1))
1782 TREE_OVERFLOW (t) = 1;
1783 return t;
1786 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1787 type TYPE. If no simplification can be done return NULL_TREE. */
1789 static tree
1790 fold_convert_const (enum tree_code code, tree type, tree arg1)
1792 if (TREE_TYPE (arg1) == type)
1793 return arg1;
1795 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1796 || TREE_CODE (type) == OFFSET_TYPE)
1798 if (TREE_CODE (arg1) == INTEGER_CST)
1799 return fold_convert_const_int_from_int (type, arg1);
1800 else if (TREE_CODE (arg1) == REAL_CST)
1801 return fold_convert_const_int_from_real (code, type, arg1);
1802 else if (TREE_CODE (arg1) == FIXED_CST)
1803 return fold_convert_const_int_from_fixed (type, arg1);
1805 else if (TREE_CODE (type) == REAL_TYPE)
1807 if (TREE_CODE (arg1) == INTEGER_CST)
1808 return build_real_from_int_cst (type, arg1);
1809 else if (TREE_CODE (arg1) == REAL_CST)
1810 return fold_convert_const_real_from_real (type, arg1);
1811 else if (TREE_CODE (arg1) == FIXED_CST)
1812 return fold_convert_const_real_from_fixed (type, arg1);
1814 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1816 if (TREE_CODE (arg1) == FIXED_CST)
1817 return fold_convert_const_fixed_from_fixed (type, arg1);
1818 else if (TREE_CODE (arg1) == INTEGER_CST)
1819 return fold_convert_const_fixed_from_int (type, arg1);
1820 else if (TREE_CODE (arg1) == REAL_CST)
1821 return fold_convert_const_fixed_from_real (type, arg1);
1823 return NULL_TREE;
1826 /* Construct a vector of zero elements of vector type TYPE. */
1828 static tree
1829 build_zero_vector (tree type)
1831 tree t;
1833 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1834 return build_vector_from_val (type, t);
1837 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1839 bool
1840 fold_convertible_p (const_tree type, const_tree arg)
1842 tree orig = TREE_TYPE (arg);
1844 if (type == orig)
1845 return true;
1847 if (TREE_CODE (arg) == ERROR_MARK
1848 || TREE_CODE (type) == ERROR_MARK
1849 || TREE_CODE (orig) == ERROR_MARK)
1850 return false;
1852 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1853 return true;
1855 switch (TREE_CODE (type))
1857 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1858 case POINTER_TYPE: case REFERENCE_TYPE:
1859 case OFFSET_TYPE:
1860 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1861 || TREE_CODE (orig) == OFFSET_TYPE)
1862 return true;
1863 return (TREE_CODE (orig) == VECTOR_TYPE
1864 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1866 case REAL_TYPE:
1867 case FIXED_POINT_TYPE:
1868 case COMPLEX_TYPE:
1869 case VECTOR_TYPE:
1870 case VOID_TYPE:
1871 return TREE_CODE (type) == TREE_CODE (orig);
1873 default:
1874 return false;
1878 /* Convert expression ARG to type TYPE. Used by the middle-end for
1879 simple conversions in preference to calling the front-end's convert. */
1881 tree
1882 fold_convert_loc (location_t loc, tree type, tree arg)
1884 tree orig = TREE_TYPE (arg);
1885 tree tem;
1887 if (type == orig)
1888 return arg;
1890 if (TREE_CODE (arg) == ERROR_MARK
1891 || TREE_CODE (type) == ERROR_MARK
1892 || TREE_CODE (orig) == ERROR_MARK)
1893 return error_mark_node;
1895 switch (TREE_CODE (type))
1897 case POINTER_TYPE:
1898 case REFERENCE_TYPE:
1899 /* Handle conversions between pointers to different address spaces. */
1900 if (POINTER_TYPE_P (orig)
1901 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1902 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1903 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1904 /* fall through */
1906 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1907 case OFFSET_TYPE:
1908 if (TREE_CODE (arg) == INTEGER_CST)
1910 tem = fold_convert_const (NOP_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 return tem;
1914 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1915 || TREE_CODE (orig) == OFFSET_TYPE)
1916 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1917 if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 return fold_convert_loc (loc, type,
1919 fold_build1_loc (loc, REALPART_EXPR,
1920 TREE_TYPE (orig), arg));
1921 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1925 case REAL_TYPE:
1926 if (TREE_CODE (arg) == INTEGER_CST)
1928 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1930 return tem;
1932 else if (TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1938 else if (TREE_CODE (arg) == FIXED_CST)
1940 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1941 if (tem != NULL_TREE)
1942 return tem;
1945 switch (TREE_CODE (orig))
1947 case INTEGER_TYPE:
1948 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1949 case POINTER_TYPE: case REFERENCE_TYPE:
1950 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1952 case REAL_TYPE:
1953 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1955 case FIXED_POINT_TYPE:
1956 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1958 case COMPLEX_TYPE:
1959 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1960 return fold_convert_loc (loc, type, tem);
1962 default:
1963 gcc_unreachable ();
1966 case FIXED_POINT_TYPE:
1967 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1968 || TREE_CODE (arg) == REAL_CST)
1970 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1971 if (tem != NULL_TREE)
1972 goto fold_convert_exit;
1975 switch (TREE_CODE (orig))
1977 case FIXED_POINT_TYPE:
1978 case INTEGER_TYPE:
1979 case ENUMERAL_TYPE:
1980 case BOOLEAN_TYPE:
1981 case REAL_TYPE:
1982 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1984 case COMPLEX_TYPE:
1985 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1986 return fold_convert_loc (loc, type, tem);
1988 default:
1989 gcc_unreachable ();
1992 case COMPLEX_TYPE:
1993 switch (TREE_CODE (orig))
1995 case INTEGER_TYPE:
1996 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997 case POINTER_TYPE: case REFERENCE_TYPE:
1998 case REAL_TYPE:
1999 case FIXED_POINT_TYPE:
2000 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2001 fold_convert_loc (loc, TREE_TYPE (type), arg),
2002 fold_convert_loc (loc, TREE_TYPE (type),
2003 integer_zero_node));
2004 case COMPLEX_TYPE:
2006 tree rpart, ipart;
2008 if (TREE_CODE (arg) == COMPLEX_EXPR)
2010 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2011 TREE_OPERAND (arg, 0));
2012 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2013 TREE_OPERAND (arg, 1));
2014 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2017 arg = save_expr (arg);
2018 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2019 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2020 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2021 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2022 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2025 default:
2026 gcc_unreachable ();
2029 case VECTOR_TYPE:
2030 if (integer_zerop (arg))
2031 return build_zero_vector (type);
2032 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2033 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2034 || TREE_CODE (orig) == VECTOR_TYPE);
2035 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2037 case VOID_TYPE:
2038 tem = fold_ignored_result (arg);
2039 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2041 default:
2042 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2043 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2044 gcc_unreachable ();
2046 fold_convert_exit:
2047 protected_set_expr_location_unshare (tem, loc);
2048 return tem;
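/* Illustrative sketch (not compiled): typical constant folds performed by
   fold_convert_loc, using only core tree.h helpers.  */
#if 0
  tree one = build_int_cst (integer_type_node, 1);
  /* Integer constant to REAL_TYPE goes through FLOAT_EXPR constant
     folding above and yields REAL_CST 1.0.  */
  tree d = fold_convert_loc (loc, double_type_node, one);
  /* Scalar to COMPLEX_TYPE yields COMPLEX_EXPR <1.0, 0.0>.  */
  tree c = fold_convert_loc (loc, complex_double_type_node, one);
#endif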
2051 /* Return false if expr can be assumed not to be an lvalue, true
2052 otherwise. */
2054 static bool
2055 maybe_lvalue_p (const_tree x)
2057 /* We only need to wrap lvalue tree codes. */
2058 switch (TREE_CODE (x))
2060 case VAR_DECL:
2061 case PARM_DECL:
2062 case RESULT_DECL:
2063 case LABEL_DECL:
2064 case FUNCTION_DECL:
2065 case SSA_NAME:
2067 case COMPONENT_REF:
2068 case MEM_REF:
2069 case INDIRECT_REF:
2070 case ARRAY_REF:
2071 case ARRAY_RANGE_REF:
2072 case BIT_FIELD_REF:
2073 case OBJ_TYPE_REF:
2075 case REALPART_EXPR:
2076 case IMAGPART_EXPR:
2077 case PREINCREMENT_EXPR:
2078 case PREDECREMENT_EXPR:
2079 case SAVE_EXPR:
2080 case TRY_CATCH_EXPR:
2081 case WITH_CLEANUP_EXPR:
2082 case COMPOUND_EXPR:
2083 case MODIFY_EXPR:
2084 case TARGET_EXPR:
2085 case COND_EXPR:
2086 case BIND_EXPR:
2087 break;
2089 default:
2090 /* Assume the worst for front-end tree codes. */
2091 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2092 break;
2093 return false;
2096 return true;
2099 /* Return an expr equal to X but certainly not valid as an lvalue. */
2101 tree
2102 non_lvalue_loc (location_t loc, tree x)
2104 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2105 us. */
2106 if (in_gimple_form)
2107 return x;
2109 if (! maybe_lvalue_p (x))
2110 return x;
2111 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2114 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2115 Zero means allow extended lvalues. */
2117 int pedantic_lvalues;
2119 /* When pedantic, return an expr equal to X but certainly not valid as a
2120 pedantic lvalue. Otherwise, return X. */
2122 static tree
2123 pedantic_non_lvalue_loc (location_t loc, tree x)
2125 if (pedantic_lvalues)
2126 return non_lvalue_loc (loc, x);
2128 return protected_set_expr_location_unshare (x, loc);
2131 /* Given a tree comparison code, return the code that is the logical inverse.
2132 It is generally not safe to do this for floating-point comparisons, except
2133 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2134 ERROR_MARK in this case. */
2136 enum tree_code
2137 invert_tree_comparison (enum tree_code code, bool honor_nans)
2139 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2140 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2141 return ERROR_MARK;
2143 switch (code)
2145 case EQ_EXPR:
2146 return NE_EXPR;
2147 case NE_EXPR:
2148 return EQ_EXPR;
2149 case GT_EXPR:
2150 return honor_nans ? UNLE_EXPR : LE_EXPR;
2151 case GE_EXPR:
2152 return honor_nans ? UNLT_EXPR : LT_EXPR;
2153 case LT_EXPR:
2154 return honor_nans ? UNGE_EXPR : GE_EXPR;
2155 case LE_EXPR:
2156 return honor_nans ? UNGT_EXPR : GT_EXPR;
2157 case LTGT_EXPR:
2158 return UNEQ_EXPR;
2159 case UNEQ_EXPR:
2160 return LTGT_EXPR;
2161 case UNGT_EXPR:
2162 return LE_EXPR;
2163 case UNGE_EXPR:
2164 return LT_EXPR;
2165 case UNLT_EXPR:
2166 return GE_EXPR;
2167 case UNLE_EXPR:
2168 return GT_EXPR;
2169 case ORDERED_EXPR:
2170 return UNORDERED_EXPR;
2171 case UNORDERED_EXPR:
2172 return ORDERED_EXPR;
2173 default:
2174 gcc_unreachable ();
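/* Example of the NaN handling above: the logical inverse of a < b is
   "a unordered, greater than or equal to b" once NaNs are honored.  */
#if 0
  invert_tree_comparison (LT_EXPR, true);   /* UNGE_EXPR, if !flag_trapping_math */
  invert_tree_comparison (LT_EXPR, false);  /* GE_EXPR */
  /* With trapping math and honored NaNs, LT_EXPR instead yields
     ERROR_MARK via the early return at the top.  */
#endif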
2178 /* Similar, but return the comparison that results if the operands are
2179 swapped. This is safe for floating-point. */
2181 enum tree_code
2182 swap_tree_comparison (enum tree_code code)
2184 switch (code)
2186 case EQ_EXPR:
2187 case NE_EXPR:
2188 case ORDERED_EXPR:
2189 case UNORDERED_EXPR:
2190 case LTGT_EXPR:
2191 case UNEQ_EXPR:
2192 return code;
2193 case GT_EXPR:
2194 return LT_EXPR;
2195 case GE_EXPR:
2196 return LE_EXPR;
2197 case LT_EXPR:
2198 return GT_EXPR;
2199 case LE_EXPR:
2200 return GE_EXPR;
2201 case UNGT_EXPR:
2202 return UNLT_EXPR;
2203 case UNGE_EXPR:
2204 return UNLE_EXPR;
2205 case UNLT_EXPR:
2206 return UNGT_EXPR;
2207 case UNLE_EXPR:
2208 return UNGE_EXPR;
2209 default:
2210 gcc_unreachable ();
2215 /* Convert a comparison tree code from an enum tree_code representation
2216 into a compcode bit-based encoding. This function is the inverse of
2217 compcode_to_comparison. */
2219 static enum comparison_code
2220 comparison_to_compcode (enum tree_code code)
2222 switch (code)
2224 case LT_EXPR:
2225 return COMPCODE_LT;
2226 case EQ_EXPR:
2227 return COMPCODE_EQ;
2228 case LE_EXPR:
2229 return COMPCODE_LE;
2230 case GT_EXPR:
2231 return COMPCODE_GT;
2232 case NE_EXPR:
2233 return COMPCODE_NE;
2234 case GE_EXPR:
2235 return COMPCODE_GE;
2236 case ORDERED_EXPR:
2237 return COMPCODE_ORD;
2238 case UNORDERED_EXPR:
2239 return COMPCODE_UNORD;
2240 case UNLT_EXPR:
2241 return COMPCODE_UNLT;
2242 case UNEQ_EXPR:
2243 return COMPCODE_UNEQ;
2244 case UNLE_EXPR:
2245 return COMPCODE_UNLE;
2246 case UNGT_EXPR:
2247 return COMPCODE_UNGT;
2248 case LTGT_EXPR:
2249 return COMPCODE_LTGT;
2250 case UNGE_EXPR:
2251 return COMPCODE_UNGE;
2252 default:
2253 gcc_unreachable ();
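/* The encoding makes combining comparisons a bitwise operation; e.g.
   LT and EQ combine to LE.  A sketch using the enum values above: */
#if 0
  int c = comparison_to_compcode (LT_EXPR)    /* COMPCODE_LT == 1 */
        | comparison_to_compcode (EQ_EXPR);   /* COMPCODE_EQ == 2 */
  /* c == 3 == COMPCODE_LE, so compcode_to_comparison
     ((enum comparison_code) c) is LE_EXPR: (x < y) || (x == y)
     is exactly x <= y.  */
#endif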
2257 /* Convert a compcode bit-based encoding of a comparison operator back
2258 to GCC's enum tree_code representation. This function is the
2259 inverse of comparison_to_compcode. */
2261 static enum tree_code
2262 compcode_to_comparison (enum comparison_code code)
2264 switch (code)
2266 case COMPCODE_LT:
2267 return LT_EXPR;
2268 case COMPCODE_EQ:
2269 return EQ_EXPR;
2270 case COMPCODE_LE:
2271 return LE_EXPR;
2272 case COMPCODE_GT:
2273 return GT_EXPR;
2274 case COMPCODE_NE:
2275 return NE_EXPR;
2276 case COMPCODE_GE:
2277 return GE_EXPR;
2278 case COMPCODE_ORD:
2279 return ORDERED_EXPR;
2280 case COMPCODE_UNORD:
2281 return UNORDERED_EXPR;
2282 case COMPCODE_UNLT:
2283 return UNLT_EXPR;
2284 case COMPCODE_UNEQ:
2285 return UNEQ_EXPR;
2286 case COMPCODE_UNLE:
2287 return UNLE_EXPR;
2288 case COMPCODE_UNGT:
2289 return UNGT_EXPR;
2290 case COMPCODE_LTGT:
2291 return LTGT_EXPR;
2292 case COMPCODE_UNGE:
2293 return UNGE_EXPR;
2294 default:
2295 gcc_unreachable ();
2299 /* Return a tree for the comparison which is the combination of
2300 doing the AND or OR (depending on CODE) of the two operations LCODE
2301 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2302 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2303 if this makes the transformation invalid. */
2305 tree
2306 combine_comparisons (location_t loc,
2307 enum tree_code code, enum tree_code lcode,
2308 enum tree_code rcode, tree truth_type,
2309 tree ll_arg, tree lr_arg)
2311 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2312 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2313 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2314 int compcode;
2316 switch (code)
2318 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2319 compcode = lcompcode & rcompcode;
2320 break;
2322 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2323 compcode = lcompcode | rcompcode;
2324 break;
2326 default:
2327 return NULL_TREE;
2330 if (!honor_nans)
2332 /* Eliminate unordered comparisons, as well as LTGT and ORD
2333 which are not used unless the mode has NaNs. */
2334 compcode &= ~COMPCODE_UNORD;
2335 if (compcode == COMPCODE_LTGT)
2336 compcode = COMPCODE_NE;
2337 else if (compcode == COMPCODE_ORD)
2338 compcode = COMPCODE_TRUE;
2340 else if (flag_trapping_math)
2342 /* Check that the original operation and the optimized ones will trap
2343 under the same condition. */
2344 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2345 && (lcompcode != COMPCODE_EQ)
2346 && (lcompcode != COMPCODE_ORD);
2347 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2348 && (rcompcode != COMPCODE_EQ)
2349 && (rcompcode != COMPCODE_ORD);
2350 bool trap = (compcode & COMPCODE_UNORD) == 0
2351 && (compcode != COMPCODE_EQ)
2352 && (compcode != COMPCODE_ORD);
2354 /* In a short-circuited boolean expression the LHS might be
2355 such that the RHS, if evaluated, will never trap. For
2356 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2357 if neither x nor y is NaN. (This is a mixed blessing: for
2358 example, the expression above will never trap, hence
2359 optimizing it to x < y would be invalid). */
2360 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2361 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2362 rtrap = false;
2364 /* If the comparison was short-circuited, and only the RHS
2365 trapped, we may now generate a spurious trap. */
2366 if (rtrap && !ltrap
2367 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2368 return NULL_TREE;
2370 /* If we changed the conditions that cause a trap, we lose. */
2371 if ((ltrap || rtrap) != trap)
2372 return NULL_TREE;
2375 if (compcode == COMPCODE_TRUE)
2376 return constant_boolean_node (true, truth_type);
2377 else if (compcode == COMPCODE_FALSE)
2378 return constant_boolean_node (false, truth_type);
2379 else
2381 enum tree_code tcode;
2383 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2384 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
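/* Usage sketch (a and b are placeholders for identical operands):
   ANDing contradictory comparisons folds to constant false.  */
#if 0
  /* (a < b) && (a > b): COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE,
     so this returns constant_boolean_node (false, boolean_type_node),
     provided the trap conditions above are preserved.  */
  combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, GT_EXPR,
                       boolean_type_node, a, b);
#endif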
2388 /* Return nonzero if two operands (typically of the same tree node)
2389 are necessarily equal. If either argument has side-effects this
2390 function returns zero. FLAGS modifies behavior as follows:
2392 If OEP_ONLY_CONST is set, only return nonzero for constants.
2393 This function tests whether the operands are indistinguishable;
2394 it does not test whether they are equal using C's == operation.
2395 The distinction is important for IEEE floating point, because
2396 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2397 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2399 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2400 even though it may hold multiple values during a function.
2401 This is because a GCC tree node guarantees that nothing else is
2402 executed between the evaluation of its "operands" (which may often
2403 be evaluated in arbitrary order). Hence if the operands themselves
2404 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2405 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2406 unset means assuming isochronic (or instantaneous) tree equivalence.
2407 Unless comparing arbitrary expression trees, such as from different
2408 statements, this flag can usually be left unset.
2410 If OEP_PURE_SAME is set, then pure functions with identical arguments
2411 are considered the same. It is used when the caller has other ways
2412 to ensure that global memory is unchanged in between. */
2414 int
2415 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2417 /* If either is ERROR_MARK, they aren't equal. */
2418 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2419 || TREE_TYPE (arg0) == error_mark_node
2420 || TREE_TYPE (arg1) == error_mark_node)
2421 return 0;
2423 /* Similar, if either does not have a type (like a released SSA name),
2424 they aren't equal. */
2425 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2426 return 0;
2428 /* Check equality of integer constants before bailing out due to
2429 precision differences. */
2430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2431 return tree_int_cst_equal (arg0, arg1);
2433 /* If both types don't have the same signedness, then we can't consider
2434 them equal. We must check this before the STRIP_NOPS calls
2435 because they may change the signedness of the arguments. As pointers
2436 strictly don't have a signedness, require either two pointers or
2437 two non-pointers as well. */
2438 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2439 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2440 return 0;
2442 /* We cannot consider pointers to different address space equal. */
2443 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2444 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2445 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2446 return 0;
2448 /* If both types don't have the same precision, then it is not safe
2449 to strip NOPs. */
2450 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2451 return 0;
2453 STRIP_NOPS (arg0);
2454 STRIP_NOPS (arg1);
2456 /* In case both args are comparisons but with different comparison
2457 code, try to swap the comparison operands of one arg to produce
2458 a match and compare that variant. */
2459 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2460 && COMPARISON_CLASS_P (arg0)
2461 && COMPARISON_CLASS_P (arg1))
2463 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2465 if (TREE_CODE (arg0) == swap_code)
2466 return operand_equal_p (TREE_OPERAND (arg0, 0),
2467 TREE_OPERAND (arg1, 1), flags)
2468 && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 TREE_OPERAND (arg1, 0), flags);
2472 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2473 /* This is needed for conversions and for COMPONENT_REF.
2474 Might as well play it safe and always test this. */
2475 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2476 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2477 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2478 return 0;
2480 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2481 We don't care about side effects in that case because the SAVE_EXPR
2482 takes care of that for us. In all other cases, two expressions are
2483 equal if they have no side effects. If we have two identical
2484 expressions with side effects that should be treated the same due
2485 to the only side effects being identical SAVE_EXPR's, that will
2486 be detected in the recursive calls below.
2487 If we are taking an invariant address of two identical objects
2488 they are necessarily equal as well. */
2489 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2490 && (TREE_CODE (arg0) == SAVE_EXPR
2491 || (flags & OEP_CONSTANT_ADDRESS_OF)
2492 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2493 return 1;
2495 /* Next handle constant cases, those for which we can return 1 even
2496 if ONLY_CONST is set. */
2497 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2498 switch (TREE_CODE (arg0))
2500 case INTEGER_CST:
2501 return tree_int_cst_equal (arg0, arg1);
2503 case FIXED_CST:
2504 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2505 TREE_FIXED_CST (arg1));
2507 case REAL_CST:
2508 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2509 TREE_REAL_CST (arg1)))
2510 return 1;
2513 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2515 /* If we do not distinguish between signed and unsigned zero,
2516 consider them equal. */
2517 if (real_zerop (arg0) && real_zerop (arg1))
2518 return 1;
2520 return 0;
2522 case VECTOR_CST:
2524 unsigned i;
2526 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2527 return 0;
2529 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2531 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2532 VECTOR_CST_ELT (arg1, i), flags))
2533 return 0;
2535 return 1;
2538 case COMPLEX_CST:
2539 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2540 flags)
2541 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2542 flags));
2544 case STRING_CST:
2545 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2546 && ! memcmp (TREE_STRING_POINTER (arg0),
2547 TREE_STRING_POINTER (arg1),
2548 TREE_STRING_LENGTH (arg0)));
2550 case ADDR_EXPR:
2551 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2552 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2553 ? OEP_CONSTANT_ADDRESS_OF : 0);
2554 default:
2555 break;
2558 if (flags & OEP_ONLY_CONST)
2559 return 0;
2561 /* Define macros to test an operand from arg0 and arg1 for equality and a
2562 variant that allows null and views null as being different from any
2563 non-null value. In the latter case, if either is null, both
2564 must be; otherwise, do the normal comparison. */
2565 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2566 TREE_OPERAND (arg1, N), flags)
2568 #define OP_SAME_WITH_NULL(N) \
2569 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2570 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2572 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2574 case tcc_unary:
2575 /* Two conversions are equal only if signedness and modes match. */
2576 switch (TREE_CODE (arg0))
2578 CASE_CONVERT:
2579 case FIX_TRUNC_EXPR:
2580 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2581 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2582 return 0;
2583 break;
2584 default:
2585 break;
2588 return OP_SAME (0);
2591 case tcc_comparison:
2592 case tcc_binary:
2593 if (OP_SAME (0) && OP_SAME (1))
2594 return 1;
2596 /* For commutative ops, allow the other order. */
2597 return (commutative_tree_code (TREE_CODE (arg0))
2598 && operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2603 case tcc_reference:
2604 /* If either of the pointer (or reference) expressions we are
2605 dereferencing contain a side effect, these cannot be equal,
2606 but their addresses can be. */
2607 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2608 && (TREE_SIDE_EFFECTS (arg0)
2609 || TREE_SIDE_EFFECTS (arg1)))
2610 return 0;
2612 switch (TREE_CODE (arg0))
2614 case INDIRECT_REF:
2615 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2616 return OP_SAME (0);
2618 case REALPART_EXPR:
2619 case IMAGPART_EXPR:
2620 return OP_SAME (0);
2622 case TARGET_MEM_REF:
2623 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2624 /* Require equal extra operands and then fall through to MEM_REF
2625 handling of the two common operands. */
2626 if (!OP_SAME_WITH_NULL (2)
2627 || !OP_SAME_WITH_NULL (3)
2628 || !OP_SAME_WITH_NULL (4))
2629 return 0;
2630 /* Fallthru. */
2631 case MEM_REF:
2632 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2633 /* Require equal access sizes, and similar pointer types.
2634 We can have incomplete types for array references of
2635 variable-sized arrays from the Fortran frontend
2636 though. */
2637 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2638 || (TYPE_SIZE (TREE_TYPE (arg0))
2639 && TYPE_SIZE (TREE_TYPE (arg1))
2640 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2641 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2642 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2643 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2644 && OP_SAME (0) && OP_SAME (1));
2646 case ARRAY_REF:
2647 case ARRAY_RANGE_REF:
2648 /* Operands 2 and 3 may be null.
2649 Compare the array index by value if it is constant first as we
2650 may have different types but same value here. */
2651 if (!OP_SAME (0))
2652 return 0;
2653 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2654 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2655 TREE_OPERAND (arg1, 1))
2656 || OP_SAME (1))
2657 && OP_SAME_WITH_NULL (2)
2658 && OP_SAME_WITH_NULL (3));
2660 case COMPONENT_REF:
2661 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2662 may be NULL when we're called to compare MEM_EXPRs. */
2663 if (!OP_SAME_WITH_NULL (0)
2664 || !OP_SAME (1))
2665 return 0;
2666 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2667 return OP_SAME_WITH_NULL (2);
2669 case BIT_FIELD_REF:
2670 if (!OP_SAME (0))
2671 return 0;
2672 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2673 return OP_SAME (1) && OP_SAME (2);
2675 default:
2676 return 0;
2679 case tcc_expression:
2680 switch (TREE_CODE (arg0))
2682 case ADDR_EXPR:
2683 case TRUTH_NOT_EXPR:
2684 return OP_SAME (0);
2686 case TRUTH_ANDIF_EXPR:
2687 case TRUTH_ORIF_EXPR:
2688 return OP_SAME (0) && OP_SAME (1);
2690 case FMA_EXPR:
2691 case WIDEN_MULT_PLUS_EXPR:
2692 case WIDEN_MULT_MINUS_EXPR:
2693 if (!OP_SAME (2))
2694 return 0;
2695 /* The multiplication operands are commutative. */
2696 /* FALLTHRU */
2698 case TRUTH_AND_EXPR:
2699 case TRUTH_OR_EXPR:
2700 case TRUTH_XOR_EXPR:
2701 if (OP_SAME (0) && OP_SAME (1))
2702 return 1;
2704 /* Otherwise take into account this is a commutative operation. */
2705 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2706 TREE_OPERAND (arg1, 1), flags)
2707 && operand_equal_p (TREE_OPERAND (arg0, 1),
2708 TREE_OPERAND (arg1, 0), flags));
2710 case COND_EXPR:
2711 case VEC_COND_EXPR:
2712 case DOT_PROD_EXPR:
2713 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2715 default:
2716 return 0;
2719 case tcc_vl_exp:
2720 switch (TREE_CODE (arg0))
2722 case CALL_EXPR:
2723 /* If the CALL_EXPRs call different functions, then they
2724 clearly cannot be equal. */
2725 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2726 flags))
2727 return 0;
2730 unsigned int cef = call_expr_flags (arg0);
2731 if (flags & OEP_PURE_SAME)
2732 cef &= ECF_CONST | ECF_PURE;
2733 else
2734 cef &= ECF_CONST;
2735 if (!cef)
2736 return 0;
2739 /* Now see if all the arguments are the same. */
2741 const_call_expr_arg_iterator iter0, iter1;
2742 const_tree a0, a1;
2743 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2744 a1 = first_const_call_expr_arg (arg1, &iter1);
2745 a0 && a1;
2746 a0 = next_const_call_expr_arg (&iter0),
2747 a1 = next_const_call_expr_arg (&iter1))
2748 if (! operand_equal_p (a0, a1, flags))
2749 return 0;
2751 /* If we get here and both argument lists are exhausted
2752 then the CALL_EXPRs are equal. */
2753 return ! (a0 || a1);
2755 default:
2756 return 0;
2759 case tcc_declaration:
2760 /* Consider __builtin_sqrt equal to sqrt. */
2761 return (TREE_CODE (arg0) == FUNCTION_DECL
2762 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2763 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2764 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2766 default:
2767 return 0;
2770 #undef OP_SAME
2771 #undef OP_SAME_WITH_NULL
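/* Example of the IEEE distinction documented above: -0.0 and 0.0
   compare equal with C's == but are distinguishable trees.  */
#if 0
  tree z  = build_real (double_type_node, dconst0);
  tree nz = build_real (double_type_node, real_value_negate (&dconst0));
  /* With signed zeros honored, operand_equal_p (z, nz, 0) is 0: the
     REAL_CSTs are not REAL_VALUES_IDENTICAL, and the fallback above
     only merges the two zeros when !HONOR_SIGNED_ZEROS.  */
#endif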
2774 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2775 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2777 When in doubt, return 0. */
2779 static int
2780 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2782 int unsignedp1, unsignedpo;
2783 tree primarg0, primarg1, primother;
2784 unsigned int correct_width;
2786 if (operand_equal_p (arg0, arg1, 0))
2787 return 1;
2789 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2790 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2791 return 0;
2793 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2794 and see if the inner values are the same. This removes any
2795 signedness comparison, which doesn't matter here. */
2796 primarg0 = arg0, primarg1 = arg1;
2797 STRIP_NOPS (primarg0);
2798 STRIP_NOPS (primarg1);
2799 if (operand_equal_p (primarg0, primarg1, 0))
2800 return 1;
2802 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2803 actual comparison operand, ARG0.
2805 First throw away any conversions to wider types
2806 already present in the operands. */
2808 primarg1 = get_narrower (arg1, &unsignedp1);
2809 primother = get_narrower (other, &unsignedpo);
2811 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2812 if (unsignedp1 == unsignedpo
2813 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2814 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2816 tree type = TREE_TYPE (arg0);
2818 /* Make sure shorter operand is extended the right way
2819 to match the longer operand. */
2820 primarg1 = fold_convert (signed_or_unsigned_type_for
2821 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2823 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2824 return 1;
2827 return 0;
2830 /* See if ARG is an expression that is either a comparison or is performing
2831 arithmetic on comparisons. The comparisons must only be comparing
2832 two different values, which will be stored in *CVAL1 and *CVAL2; if
2833 they are nonzero it means that some operands have already been found.
2834 No variables may be used anywhere else in the expression except in the
2835 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2836 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2838 If this is true, return 1. Otherwise, return zero. */
2840 static int
2841 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2843 enum tree_code code = TREE_CODE (arg);
2844 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2846 /* We can handle some of the tcc_expression cases here. */
2847 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2848 tclass = tcc_unary;
2849 else if (tclass == tcc_expression
2850 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2851 || code == COMPOUND_EXPR))
2852 tclass = tcc_binary;
2854 else if (tclass == tcc_expression && code == SAVE_EXPR
2855 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2857 /* If we've already found a CVAL1 or CVAL2, this expression is
2858 too complex to handle. */
2859 if (*cval1 || *cval2)
2860 return 0;
2862 tclass = tcc_unary;
2863 *save_p = 1;
2866 switch (tclass)
2868 case tcc_unary:
2869 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2871 case tcc_binary:
2872 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2873 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2874 cval1, cval2, save_p));
2876 case tcc_constant:
2877 return 1;
2879 case tcc_expression:
2880 if (code == COND_EXPR)
2881 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2882 cval1, cval2, save_p)
2883 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2884 cval1, cval2, save_p)
2885 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2886 cval1, cval2, save_p));
2887 return 0;
2889 case tcc_comparison:
2890 /* First see if we can handle the first operand, then the second. For
2891 the second operand, we know *CVAL1 can't be zero. It must be that
2892 one side of the comparison is each of the values; test for the
2893 case where this isn't true by failing if the two operands
2894 are the same. */
2896 if (operand_equal_p (TREE_OPERAND (arg, 0),
2897 TREE_OPERAND (arg, 1), 0))
2898 return 0;
2900 if (*cval1 == 0)
2901 *cval1 = TREE_OPERAND (arg, 0);
2902 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2904 else if (*cval2 == 0)
2905 *cval2 = TREE_OPERAND (arg, 0);
2906 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2908 else
2909 return 0;
2911 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2913 else if (*cval2 == 0)
2914 *cval2 = TREE_OPERAND (arg, 1);
2915 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2917 else
2918 return 0;
2920 return 1;
2922 default:
2923 return 0;
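/* Example: for ARG = (x < y) | (x == y), the walk above stores
   *CVAL1 = x, *CVAL2 = y and returns 1.  For (x < y) | (x == z) it
   returns 0, since a third value would have to participate.  */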
2927 /* ARG is a tree that is known to contain just arithmetic operations and
2928 comparisons. Evaluate the operations in the tree substituting NEW0 for
2929 any occurrence of OLD0 as an operand of a comparison and likewise for
2930 NEW1 and OLD1. */
2932 static tree
2933 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2934 tree old1, tree new1)
2936 tree type = TREE_TYPE (arg);
2937 enum tree_code code = TREE_CODE (arg);
2938 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2940 /* We can handle some of the tcc_expression cases here. */
2941 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2942 tclass = tcc_unary;
2943 else if (tclass == tcc_expression
2944 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2945 tclass = tcc_binary;
2947 switch (tclass)
2949 case tcc_unary:
2950 return fold_build1_loc (loc, code, type,
2951 eval_subst (loc, TREE_OPERAND (arg, 0),
2952 old0, new0, old1, new1));
2954 case tcc_binary:
2955 return fold_build2_loc (loc, code, type,
2956 eval_subst (loc, TREE_OPERAND (arg, 0),
2957 old0, new0, old1, new1),
2958 eval_subst (loc, TREE_OPERAND (arg, 1),
2959 old0, new0, old1, new1));
2961 case tcc_expression:
2962 switch (code)
2964 case SAVE_EXPR:
2965 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2966 old1, new1);
2968 case COMPOUND_EXPR:
2969 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2970 old1, new1);
2972 case COND_EXPR:
2973 return fold_build3_loc (loc, code, type,
2974 eval_subst (loc, TREE_OPERAND (arg, 0),
2975 old0, new0, old1, new1),
2976 eval_subst (loc, TREE_OPERAND (arg, 1),
2977 old0, new0, old1, new1),
2978 eval_subst (loc, TREE_OPERAND (arg, 2),
2979 old0, new0, old1, new1));
2980 default:
2981 break;
2983 /* Fall through - ??? */
2985 case tcc_comparison:
2987 tree arg0 = TREE_OPERAND (arg, 0);
2988 tree arg1 = TREE_OPERAND (arg, 1);
2990 /* We need to check both for exact equality and tree equality. The
2991 former will be true if the operand has a side-effect. In that
2992 case, we know the operand occurred exactly once. */
2994 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2995 arg0 = new0;
2996 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2997 arg0 = new1;
2999 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3000 arg1 = new0;
3001 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3002 arg1 = new1;
3004 return fold_build2_loc (loc, code, type, arg0, arg1);
3007 default:
3008 return arg;
3012 /* Return a tree for the case when the result of an expression is RESULT
3013 converted to TYPE and OMITTED was previously an operand of the expression
3014 but is now not needed (e.g., we folded OMITTED * 0).
3016 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3017 the conversion of RESULT to TYPE. */
3019 tree
3020 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3022 tree t = fold_convert_loc (loc, type, result);
3024 /* If the resulting operand is an empty statement, just return the omitted
3025 statement casted to void. */
3026 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3027 return build1_loc (loc, NOP_EXPR, void_type_node,
3028 fold_ignored_result (omitted));
3030 if (TREE_SIDE_EFFECTS (omitted))
3031 return build2_loc (loc, COMPOUND_EXPR, type,
3032 fold_ignored_result (omitted), t);
3034 return non_lvalue_loc (loc, t);
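/* Usage sketch ("call" stands for some CALL_EXPR with side effects):
   folding call * 0 must still evaluate the call, so the caller asks
   for the 0 result while keeping the omitted operand alive.  */
#if 0
  tree t = omit_one_operand_loc (loc, integer_type_node,
                                 integer_zero_node, call);
  /* t is COMPOUND_EXPR <call, 0>; without side effects it would be
     just the converted constant wrapped as a non-lvalue.  */
#endif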
3037 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3039 static tree
3040 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3041 tree omitted)
3043 tree t = fold_convert_loc (loc, type, result);
3045 /* If the resulting operand is an empty statement, just return the omitted
3046 statement casted to void. */
3047 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3048 return build1_loc (loc, NOP_EXPR, void_type_node,
3049 fold_ignored_result (omitted));
3051 if (TREE_SIDE_EFFECTS (omitted))
3052 return build2_loc (loc, COMPOUND_EXPR, type,
3053 fold_ignored_result (omitted), t);
3055 return pedantic_non_lvalue_loc (loc, t);
3058 /* Return a tree for the case when the result of an expression is RESULT
3059 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3060 of the expression but are now not needed.
3062 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3063 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3064 evaluated before OMITTED2. Otherwise, if neither has side effects,
3065 just do the conversion of RESULT to TYPE. */
3067 tree
3068 omit_two_operands_loc (location_t loc, tree type, tree result,
3069 tree omitted1, tree omitted2)
3071 tree t = fold_convert_loc (loc, type, result);
3073 if (TREE_SIDE_EFFECTS (omitted2))
3074 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3075 if (TREE_SIDE_EFFECTS (omitted1))
3076 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3078 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3082 /* Return a simplified tree node for the truth-negation of ARG. This
3083 never alters ARG itself. We assume that ARG is an operation that
3084 returns a truth value (0 or 1).
3086 FIXME: one would think we would fold the result, but it causes
3087 problems with the dominator optimizer. */
3089 tree
3090 fold_truth_not_expr (location_t loc, tree arg)
3092 tree type = TREE_TYPE (arg);
3093 enum tree_code code = TREE_CODE (arg);
3094 location_t loc1, loc2;
3096 /* If this is a comparison, we can simply invert it, except for
3097 floating-point non-equality comparisons, in which case we just
3098 enclose a TRUTH_NOT_EXPR around what we have. */
3100 if (TREE_CODE_CLASS (code) == tcc_comparison)
3102 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3103 if (FLOAT_TYPE_P (op_type)
3104 && flag_trapping_math
3105 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3106 && code != NE_EXPR && code != EQ_EXPR)
3107 return NULL_TREE;
3109 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3110 if (code == ERROR_MARK)
3111 return NULL_TREE;
3113 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3114 TREE_OPERAND (arg, 1));
3117 switch (code)
3119 case INTEGER_CST:
3120 return constant_boolean_node (integer_zerop (arg), type);
3122 case TRUTH_AND_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3125 return build2_loc (loc, TRUTH_OR_EXPR, type,
3126 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3127 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3129 case TRUTH_OR_EXPR:
3130 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3131 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3132 return build2_loc (loc, TRUTH_AND_EXPR, type,
3133 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3134 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3136 case TRUTH_XOR_EXPR:
3137 /* Here we can invert either operand. We invert the first operand
3138 unless the second operand is a TRUTH_NOT_EXPR in which case our
3139 result is the XOR of the first operand with the inside of the
3140 negation of the second operand. */
3142 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3143 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3144 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3145 else
3146 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3147 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3148 TREE_OPERAND (arg, 1));
3150 case TRUTH_ANDIF_EXPR:
3151 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3152 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3153 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3155 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3157 case TRUTH_ORIF_EXPR:
3158 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3159 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3160 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3161 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3162 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3164 case TRUTH_NOT_EXPR:
3165 return TREE_OPERAND (arg, 0);
3167 case COND_EXPR:
3169 tree arg1 = TREE_OPERAND (arg, 1);
3170 tree arg2 = TREE_OPERAND (arg, 2);
3172 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3175 /* A COND_EXPR may have a throw as one operand, which
3176 then has void type. Just leave void operands
3177 as they are. */
3178 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3179 VOID_TYPE_P (TREE_TYPE (arg1))
3180 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3181 VOID_TYPE_P (TREE_TYPE (arg2))
3182 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3185 case COMPOUND_EXPR:
3186 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3187 return build2_loc (loc, COMPOUND_EXPR, type,
3188 TREE_OPERAND (arg, 0),
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3191 case NON_LVALUE_EXPR:
3192 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3193 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3195 CASE_CONVERT:
3196 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3197 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3199 /* ... fall through ... */
3201 case FLOAT_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 return build1_loc (loc, TREE_CODE (arg), type,
3204 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3206 case BIT_AND_EXPR:
3207 if (!integer_onep (TREE_OPERAND (arg, 1)))
3208 return NULL_TREE;
3209 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3211 case SAVE_EXPR:
3212 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3214 case CLEANUP_POINT_EXPR:
3215 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3216 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3217 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3219 default:
3220 return NULL_TREE;
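/* The TRUTH_AND_EXPR/TRUTH_OR_EXPR cases above are De Morgan's laws:
   !(a && b) => !a || !b and !(a || b) => !a && !b, with each
   operand's location preserved via expr_location_or.  */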
3224 /* Return a simplified tree node for the truth-negation of ARG. This
3225 never alters ARG itself. We assume that ARG is an operation that
3226 returns a truth value (0 or 1).
3228 FIXME: one would think we would fold the result, but it causes
3229 problems with the dominator optimizer. */
3231 tree
3232 invert_truthvalue_loc (location_t loc, tree arg)
3234 tree tem;
3236 if (TREE_CODE (arg) == ERROR_MARK)
3237 return arg;
3239 tem = fold_truth_not_expr (loc, arg);
3240 if (!tem)
3241 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3243 return tem;
3246 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3247 operands are another bit-wise operation with a common input. If so,
3248 distribute the bit operations to save an operation and possibly two if
3249 constants are involved. For example, convert
3250 (A | B) & (A | C) into A | (B & C)
3251 Further simplification will occur if B and C are constants.
3253 If this optimization cannot be done, 0 will be returned. */
3255 static tree
3256 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3257 tree arg0, tree arg1)
3259 tree common;
3260 tree left, right;
3262 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3263 || TREE_CODE (arg0) == code
3264 || (TREE_CODE (arg0) != BIT_AND_EXPR
3265 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3266 return 0;
3268 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3270 common = TREE_OPERAND (arg0, 0);
3271 left = TREE_OPERAND (arg0, 1);
3272 right = TREE_OPERAND (arg1, 1);
3274 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3276 common = TREE_OPERAND (arg0, 0);
3277 left = TREE_OPERAND (arg0, 1);
3278 right = TREE_OPERAND (arg1, 0);
3280 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3282 common = TREE_OPERAND (arg0, 1);
3283 left = TREE_OPERAND (arg0, 0);
3284 right = TREE_OPERAND (arg1, 1);
3286 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3288 common = TREE_OPERAND (arg0, 1);
3289 left = TREE_OPERAND (arg0, 0);
3290 right = TREE_OPERAND (arg1, 0);
3292 else
3293 return 0;
3295 common = fold_convert_loc (loc, type, common);
3296 left = fold_convert_loc (loc, type, left);
3297 right = fold_convert_loc (loc, type, right);
3298 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3299 fold_build2_loc (loc, code, type, left, right));
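/* With constants the saving is immediate, e.g.
   (x | 3) & (x | 5) => x | (3 & 5) => x | 1,
   replacing two ORs and an AND by a single OR.  */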
3302 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3303 with code CODE. This optimization is unsafe. */
3304 static tree
3305 distribute_real_division (location_t loc, enum tree_code code, tree type,
3306 tree arg0, tree arg1)
3308 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3309 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3311 /* (A / C) +- (B / C) -> (A +- B) / C. */
3312 if (mul0 == mul1
3313 && operand_equal_p (TREE_OPERAND (arg0, 1),
3314 TREE_OPERAND (arg1, 1), 0))
3315 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3316 fold_build2_loc (loc, code, type,
3317 TREE_OPERAND (arg0, 0),
3318 TREE_OPERAND (arg1, 0)),
3319 TREE_OPERAND (arg0, 1));
3321 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3322 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3323 TREE_OPERAND (arg1, 0), 0)
3324 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3325 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3327 REAL_VALUE_TYPE r0, r1;
3328 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3329 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3330 if (!mul0)
3331 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3332 if (!mul1)
3333 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3334 real_arithmetic (&r0, code, &r0, &r1);
3335 return fold_build2_loc (loc, MULT_EXPR, type,
3336 TREE_OPERAND (arg0, 0),
3337 build_real (type, r0));
3340 return NULL_TREE;
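/* E.g. x/3.0 + x/5.0 => x * (1/3.0 + 1/5.0).  Both patterns change
   rounding (and possibly overflow) behavior, which is why callers
   are expected to guard this with flag_unsafe_math_optimizations.  */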
3343 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3344 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3346 static tree
3347 make_bit_field_ref (location_t loc, tree inner, tree type,
3348 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3350 tree result, bftype;
3352 if (bitpos == 0)
3354 tree size = TYPE_SIZE (TREE_TYPE (inner));
3355 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3356 || POINTER_TYPE_P (TREE_TYPE (inner)))
3357 && host_integerp (size, 0)
3358 && tree_low_cst (size, 0) == bitsize)
3359 return fold_convert_loc (loc, type, inner);
3362 bftype = type;
3363 if (TYPE_PRECISION (bftype) != bitsize
3364 || TYPE_UNSIGNED (bftype) == !unsignedp)
3365 bftype = build_nonstandard_integer_type (bitsize, 0);
3367 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3368 size_int (bitsize), bitsize_int (bitpos));
3370 if (bftype != type)
3371 result = fold_convert_loc (loc, type, result);
3373 return result;
3376 /* Optimize a bit-field compare.
3378 There are two cases: First is a compare against a constant and the
3379 second is a comparison of two items where the fields are at the same
3380 bit position relative to the start of a chunk (byte, halfword, word)
3381 large enough to contain it. In these cases we can avoid the shift
3382 implicit in bitfield extractions.
3384 For constants, we emit a compare of the shifted constant with the
3385 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3386 compared. For two fields at the same position, we do the ANDs with the
3387 similar mask and compare the result of the ANDs.
3389 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3390 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3391 are the left and right operands of the comparison, respectively.
3393 If the optimization described above can be done, we return the resulting
3394 tree. Otherwise we return zero. */
3396 static tree
3397 optimize_bit_field_compare (location_t loc, enum tree_code code,
3398 tree compare_type, tree lhs, tree rhs)
3400 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3401 tree type = TREE_TYPE (lhs);
3402 tree signed_type, unsigned_type;
3403 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3404 enum machine_mode lmode, rmode, nmode;
3405 int lunsignedp, runsignedp;
3406 int lvolatilep = 0, rvolatilep = 0;
3407 tree linner, rinner = NULL_TREE;
3408 tree mask;
3409 tree offset;
3411 /* In the strict volatile bitfields case, doing code changes here may prevent
3412 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3413 if (flag_strict_volatile_bitfields > 0)
3414 return 0;
3416 /* Get all the information about the extractions being done. If the bit size
3417 is the same as the size of the underlying object, we aren't doing an
3418 extraction at all and so can do nothing. We also don't want to
3419 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3420 then will no longer be able to replace it. */
3421 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3422 &lunsignedp, &lvolatilep, false);
3423 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3424 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3425 return 0;
3427 if (!const_p)
3429 /* If this is not a constant, we can only do something if bit positions,
3430 sizes, and signedness are the same. */
3431 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3432 &runsignedp, &rvolatilep, false);
3434 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3435 || lunsignedp != runsignedp || offset != 0
3436 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3437 return 0;
3440 /* See if we can find a mode to refer to this field. We should be able to,
3441 but fail if we can't. */
3442 if (lvolatilep
3443 && GET_MODE_BITSIZE (lmode) > 0
3444 && flag_strict_volatile_bitfields > 0)
3445 nmode = lmode;
3446 else
3447 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3448 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3449 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3450 TYPE_ALIGN (TREE_TYPE (rinner))),
3451 word_mode, lvolatilep || rvolatilep);
3452 if (nmode == VOIDmode)
3453 return 0;
3455 /* Set signed and unsigned types of the precision of this mode for the
3456 shifts below. */
3457 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3458 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3460 /* Compute the bit position and size for the new reference and our offset
3461 within it. If the new reference is the same size as the original, we
3462 won't optimize anything, so return zero. */
3463 nbitsize = GET_MODE_BITSIZE (nmode);
3464 nbitpos = lbitpos & ~ (nbitsize - 1);
3465 lbitpos -= nbitpos;
3466 if (nbitsize == lbitsize)
3467 return 0;
3469 if (BYTES_BIG_ENDIAN)
3470 lbitpos = nbitsize - lbitsize - lbitpos;
3472 /* Make the mask to be used against the extracted field. */
3473 mask = build_int_cst_type (unsigned_type, -1);
3474 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3475 mask = const_binop (RSHIFT_EXPR, mask,
3476 size_int (nbitsize - lbitsize - lbitpos));
3478 if (! const_p)
3479 /* If not comparing with constant, just rework the comparison
3480 and return. */
3481 return fold_build2_loc (loc, code, compare_type,
3482 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3483 make_bit_field_ref (loc, linner,
3484 unsigned_type,
3485 nbitsize, nbitpos,
3486 1),
3487 mask),
3488 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3489 make_bit_field_ref (loc, rinner,
3490 unsigned_type,
3491 nbitsize, nbitpos,
3492 1),
3493 mask));
3495 /* Otherwise, we are handling the constant case. See if the constant is too
3496 big for the field. Warn and return a tree for 0 (false) if so. We do
3497 this not only for its own sake, but to avoid having to test for this
3498 error case below. If we didn't, we might generate wrong code.
3500 For unsigned fields, the constant shifted right by the field length should
3501 be all zero. For signed fields, the high-order bits should agree with
3502 the sign bit. */
3504 if (lunsignedp)
3506 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3507 fold_convert_loc (loc,
3508 unsigned_type, rhs),
3509 size_int (lbitsize))))
3511 warning (0, "comparison is always %d due to width of bit-field",
3512 code == NE_EXPR);
3513 return constant_boolean_node (code == NE_EXPR, compare_type);
3516 else
3518 tree tem = const_binop (RSHIFT_EXPR,
3519 fold_convert_loc (loc, signed_type, rhs),
3520 size_int (lbitsize - 1));
3521 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3523 warning (0, "comparison is always %d due to width of bit-field",
3524 code == NE_EXPR);
3525 return constant_boolean_node (code == NE_EXPR, compare_type);
3529 /* Single-bit compares should always be against zero. */
3530 if (lbitsize == 1 && ! integer_zerop (rhs))
3532 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3533 rhs = build_int_cst (type, 0);
3536 /* Make a new bitfield reference, shift the constant over the
3537 appropriate number of bits and mask it with the computed mask
3538 (in case this was a signed field). If we changed it, make a new one. */
3539 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3540 if (lvolatilep)
3542 TREE_SIDE_EFFECTS (lhs) = 1;
3543 TREE_THIS_VOLATILE (lhs) = 1;
3546 rhs = const_binop (BIT_AND_EXPR,
3547 const_binop (LSHIFT_EXPR,
3548 fold_convert_loc (loc, unsigned_type, rhs),
3549 size_int (lbitpos)),
3550 mask);
3552 lhs = build2_loc (loc, code, compare_type,
3553 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3554 return lhs;
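/* Sketch of the constant case: for struct S { unsigned f : 3; } s,
   "s.f == 5" becomes roughly
     (BIT_FIELD_REF <s, nbitsize, nbitpos> & mask) == ((5 << lbitpos) & mask)
   i.e. one masked load compared against a pre-shifted constant,
   instead of an extract-shift-compare sequence.  */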
3557 /* Subroutine for fold_truth_andor_1: decode a field reference.
3559 If EXP is a comparison reference, we return the innermost reference.
3561 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3562 set to the starting bit number.
3564 If the innermost field can be completely contained in a mode-sized
3565 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3567 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3568 otherwise it is not changed.
3570 *PUNSIGNEDP is set to the signedness of the field.
3572 *PMASK is set to the mask used. This is either contained in a
3573 BIT_AND_EXPR or derived from the width of the field.
3575 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3577 Return 0 if this is not a component reference or is one that we can't
3578 do anything with. */
3580 static tree
3581 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3582 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3583 int *punsignedp, int *pvolatilep,
3584 tree *pmask, tree *pand_mask)
3586 tree outer_type = 0;
3587 tree and_mask = 0;
3588 tree mask, inner, offset;
3589 tree unsigned_type;
3590 unsigned int precision;
3592 /* All the optimizations using this function assume integer fields.
3593 There are problems with FP fields since the type_for_size call
3594 below can fail for, e.g., XFmode. */
3595 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3596 return 0;
3598 /* We are interested in the bare arrangement of bits, so strip everything
3599 that doesn't affect the machine mode. However, record the type of the
3600 outermost expression if it may matter below. */
3601 if (CONVERT_EXPR_P (exp)
3602 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3603 outer_type = TREE_TYPE (exp);
3604 STRIP_NOPS (exp);
3606 if (TREE_CODE (exp) == BIT_AND_EXPR)
3608 and_mask = TREE_OPERAND (exp, 1);
3609 exp = TREE_OPERAND (exp, 0);
3610 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3611 if (TREE_CODE (and_mask) != INTEGER_CST)
3612 return 0;
3615 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3616 punsignedp, pvolatilep, false);
3617 if ((inner == exp && and_mask == 0)
3618 || *pbitsize < 0 || offset != 0
3619 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3620 return 0;
3622 /* If the number of bits in the reference is the same as the bitsize of
3623 the outer type, then the outer type gives the signedness. Otherwise
3624 (in case of a small bitfield) the signedness is unchanged. */
3625 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3626 *punsignedp = TYPE_UNSIGNED (outer_type);
3628 /* Compute the mask to access the bitfield. */
3629 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3630 precision = TYPE_PRECISION (unsigned_type);
3632 mask = build_int_cst_type (unsigned_type, -1);
3634 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3635 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3637 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3638 if (and_mask != 0)
3639 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3640 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3642 *pmask = mask;
3643 *pand_mask = and_mask;
3644 return inner;
3647 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3648 bit positions. */
3650 static int
3651 all_ones_mask_p (const_tree mask, int size)
3653 tree type = TREE_TYPE (mask);
3654 unsigned int precision = TYPE_PRECISION (type);
3655 tree tmask;
3657 tmask = build_int_cst_type (signed_type_for (type), -1);
3659 return
3660 tree_int_cst_equal (mask,
3661 const_binop (RSHIFT_EXPR,
3662 const_binop (LSHIFT_EXPR, tmask,
3663 size_int (precision - size)),
3664 size_int (precision - size)));
3667 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3668 represents the sign bit of EXP's type. If EXP represents a sign
3669 or zero extension, also test VAL against the unextended type.
3670 The return value is the (sub)expression whose sign bit is VAL,
3671 or NULL_TREE otherwise. */
3673 static tree
3674 sign_bit_p (tree exp, const_tree val)
3676 unsigned HOST_WIDE_INT mask_lo, lo;
3677 HOST_WIDE_INT mask_hi, hi;
3678 int width;
3679 tree t;
3681 /* Tree EXP must have an integral type. */
3682 t = TREE_TYPE (exp);
3683 if (! INTEGRAL_TYPE_P (t))
3684 return NULL_TREE;
3686 /* Tree VAL must be an integer constant. */
3687 if (TREE_CODE (val) != INTEGER_CST
3688 || TREE_OVERFLOW (val))
3689 return NULL_TREE;
3691 width = TYPE_PRECISION (t);
3692 if (width > HOST_BITS_PER_WIDE_INT)
3694 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3695 lo = 0;
3697 mask_hi = ((unsigned HOST_WIDE_INT) -1
3698 >> (HOST_BITS_PER_DOUBLE_INT - width));
3699 mask_lo = -1;
3701 else
3703 hi = 0;
3704 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3706 mask_hi = 0;
3707 mask_lo = ((unsigned HOST_WIDE_INT) -1
3708 >> (HOST_BITS_PER_WIDE_INT - width));
3711 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3712 treat VAL as if it were unsigned. */
3713 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3714 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3715 return exp;
3717 /* Handle extension from a narrower type. */
3718 if (TREE_CODE (exp) == NOP_EXPR
3719 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3720 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3722 return NULL_TREE;
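/* Example: for a 32-bit signed EXP, only VAL == INT_MIN (bit pattern
   0x80000000) selects the sign bit, and sign_bit_p returns EXP;
   any other constant yields NULL_TREE.  */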
3725 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3726 to be evaluated unconditionally. */
3728 static int
3729 simple_operand_p (const_tree exp)
3731 /* Strip any conversions that don't change the machine mode. */
3732 STRIP_NOPS (exp);
3734 return (CONSTANT_CLASS_P (exp)
3735 || TREE_CODE (exp) == SSA_NAME
3736 || (DECL_P (exp)
3737 && ! TREE_ADDRESSABLE (exp)
3738 && ! TREE_THIS_VOLATILE (exp)
3739 && ! DECL_NONLOCAL (exp)
3740 /* Don't regard global variables as simple. They may be
3741 allocated in ways unknown to the compiler (shared memory,
3742 #pragma weak, etc). */
3743 && ! TREE_PUBLIC (exp)
3744 && ! DECL_EXTERNAL (exp)
3745 /* Loading a static variable is unduly expensive, but global
3746 registers aren't expensive. */
3747 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3750 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3751 to be evaluated unconditionally.
3752 In addition to simple_operand_p, we assume that comparisons, conversions,
3753 and logic-not operations are simple if their operands are simple, too. */
3755 static bool
3756 simple_operand_p_2 (tree exp)
3758 enum tree_code code;
3760 if (TREE_SIDE_EFFECTS (exp)
3761 || tree_could_trap_p (exp))
3762 return false;
3764 while (CONVERT_EXPR_P (exp))
3765 exp = TREE_OPERAND (exp, 0);
3767 code = TREE_CODE (exp);
3769 if (TREE_CODE_CLASS (code) == tcc_comparison)
3770 return (simple_operand_p (TREE_OPERAND (exp, 0))
3771 && simple_operand_p (TREE_OPERAND (exp, 1)));
3773 if (code == TRUTH_NOT_EXPR)
3774 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3776 return simple_operand_p (exp);
3780 /* The following functions are subroutines to fold_range_test and allow it to
3781 try to change a logical combination of comparisons into a range test.
3783 For example, both
3784 X == 2 || X == 3 || X == 4 || X == 5
3785 and
3786 X >= 2 && X <= 5
3787 are converted to
3788 (unsigned) (X - 2) <= 3
3790 We describe each set of comparisons as being either inside or outside
3791 a range, using a variable named like IN_P, and then describe the
3792 range with a lower and upper bound. If one of the bounds is omitted,
3793 it represents either the highest or lowest value of the type.
3795 In the comments below, we represent a range by two numbers in brackets
3796 preceded by a "+" to designate being inside that range, or a "-" to
3797 designate being outside that range, so the condition can be inverted by
3798 flipping the prefix. An omitted bound is represented by a "-". For
3799 example, "- [-, 10]" means being outside the range starting at the lowest
3800 possible value and ending at 10, in other words, being greater than 10.
3801 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3802 always false.
3804 We set up things so that the missing bounds are handled in a consistent
3805 manner so neither a missing bound nor "true" and "false" need to be
3806 handled using a special case. */
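/* Worked example of the notation: the test
     X == 2 || X == 3 || X == 4 || X == 5
   is the range "+ [2, 5]", and its inverse is "- [2, 5]".  The final
   form (unsigned) (X - 2) <= 3 is "+ [2, 5]" re-expressed as a single
   unsigned comparison after sliding the range down to start at 0.  */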
3808 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3809 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3810 and UPPER1_P are nonzero if the respective argument is an upper bound
3811 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3812 must be specified for a comparison. ARG1 will be converted to ARG0's
3813 type if both are specified. */
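/* For instance, comparing an omitted lower bound (ARG0 == 0 with
   UPPER0_P == 0, hence SGN0 == -1 below) against any finite ARG1
   (SGN1 == 0) with LT_EXPR yields true: the missing lower bound
   behaves like minus infinity.  */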
3815 static tree
3816 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3817 tree arg1, int upper1_p)
3819 tree tem;
3820 int result;
3821 int sgn0, sgn1;
3823 /* If neither arg represents infinity, do the normal operation.
3824 Else, if not a comparison, return infinity. Else handle the special
3825 comparison rules. Note that most of the cases below won't occur, but
3826 are handled for consistency. */
3828 if (arg0 != 0 && arg1 != 0)
3830 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3831 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3832 STRIP_NOPS (tem);
3833 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3836 if (TREE_CODE_CLASS (code) != tcc_comparison)
3837 return 0;
3839 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3840 for neither. In real maths, we cannot assume open ended ranges are
3841 the same. But, this is computer arithmetic, where numbers are finite.
3842 We can therefore make the transformation of any unbounded range with
3843 the value Z, Z being greater than any representable number. This permits
3844 us to treat unbounded ranges as equal. */
3845 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3846 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3847 switch (code)
3849 case EQ_EXPR:
3850 result = sgn0 == sgn1;
3851 break;
3852 case NE_EXPR:
3853 result = sgn0 != sgn1;
3854 break;
3855 case LT_EXPR:
3856 result = sgn0 < sgn1;
3857 break;
3858 case LE_EXPR:
3859 result = sgn0 <= sgn1;
3860 break;
3861 case GT_EXPR:
3862 result = sgn0 > sgn1;
3863 break;
3864 case GE_EXPR:
3865 result = sgn0 >= sgn1;
3866 break;
3867 default:
3868 gcc_unreachable ();
3871 return constant_boolean_node (result, type);
3874 /* Helper routine for make_range. Perform one step for it, return
3875 new expression if the loop should continue or NULL_TREE if it should
3876 stop. */
3878 tree
3879 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3880 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3881 bool *strict_overflow_p)
3883 tree arg0_type = TREE_TYPE (arg0);
3884 tree n_low, n_high, low = *p_low, high = *p_high;
3885 int in_p = *p_in_p, n_in_p;
3887 switch (code)
3889 case TRUTH_NOT_EXPR:
3890 /* We can only do something if the range is testing for zero. */
3891 if (low == NULL_TREE || high == NULL_TREE
3892 || ! integer_zerop (low) || ! integer_zerop (high))
3893 return NULL_TREE;
3894 *p_in_p = ! in_p;
3895 return arg0;
3897 case EQ_EXPR: case NE_EXPR:
3898 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3899 /* We can only do something if the range is testing for zero
3900 and if the second operand is an integer constant. Note that
3901 saying something is "in" the range we make is done by
3902 complementing IN_P since it will be set in the initial case of
3903 being not equal to zero; "out" is leaving it alone. */
3904 if (low == NULL_TREE || high == NULL_TREE
3905 || ! integer_zerop (low) || ! integer_zerop (high)
3906 || TREE_CODE (arg1) != INTEGER_CST)
3907 return NULL_TREE;
3909 switch (code)
3911 case NE_EXPR: /* - [c, c] */
3912 low = high = arg1;
3913 break;
3914 case EQ_EXPR: /* + [c, c] */
3915 in_p = ! in_p, low = high = arg1;
3916 break;
3917 case GT_EXPR: /* - [-, c] */
3918 low = 0, high = arg1;
3919 break;
3920 case GE_EXPR: /* + [c, -] */
3921 in_p = ! in_p, low = arg1, high = 0;
3922 break;
3923 case LT_EXPR: /* - [c, -] */
3924 low = arg1, high = 0;
3925 break;
3926 case LE_EXPR: /* + [-, c] */
3927 in_p = ! in_p, low = 0, high = arg1;
3928 break;
3929 default:
3930 gcc_unreachable ();
3933 /* If this is an unsigned comparison, we also know that EXP is
3934 greater than or equal to zero. We base the range tests we make
3935 on that fact, so we record it here so we can parse existing
3936 range tests. We test arg0_type since often the return type
3937 of, e.g. EQ_EXPR, is boolean. */
3938 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3940 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3941 in_p, low, high, 1,
3942 build_int_cst (arg0_type, 0),
3943 NULL_TREE))
3944 return NULL_TREE;
3946 in_p = n_in_p, low = n_low, high = n_high;
3948 /* If the high bound is missing, but we have a nonzero low
3949 bound, reverse the range so it goes from zero to the low bound
3950 minus 1. */
3951 if (high == 0 && low && ! integer_zerop (low))
3953 in_p = ! in_p;
3954 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3955 integer_one_node, 0);
3956 low = build_int_cst (arg0_type, 0);
3960 *p_low = low;
3961 *p_high = high;
3962 *p_in_p = in_p;
3963 return arg0;
3965 case NEGATE_EXPR:
3966 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3967 low and high are non-NULL, then normalize will DTRT. */
3968 if (!TYPE_UNSIGNED (arg0_type)
3969 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3971 if (low == NULL_TREE)
3972 low = TYPE_MIN_VALUE (arg0_type);
3973 if (high == NULL_TREE)
3974 high = TYPE_MAX_VALUE (arg0_type);
3977 /* (-x) IN [a,b] -> x in [-b, -a] */
3978 n_low = range_binop (MINUS_EXPR, exp_type,
3979 build_int_cst (exp_type, 0),
3980 0, high, 1);
3981 n_high = range_binop (MINUS_EXPR, exp_type,
3982 build_int_cst (exp_type, 0),
3983 0, low, 0);
3984 if (n_high != 0 && TREE_OVERFLOW (n_high))
3985 return NULL_TREE;
3986 goto normalize;
3988 case BIT_NOT_EXPR:
3989 /* ~ X -> -X - 1 */
3990 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3991 build_int_cst (exp_type, 1));
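      /* This is the usual two's-complement identity ~X == -X - 1;
         e.g. ~5 is -5 - 1 == -6, so a range test on ~X becomes a
         range test on -X - 1.  */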
3993 case PLUS_EXPR:
3994 case MINUS_EXPR:
3995 if (TREE_CODE (arg1) != INTEGER_CST)
3996 return NULL_TREE;
3998 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3999 move a constant to the other side. */
4000 if (!TYPE_UNSIGNED (arg0_type)
4001 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4002 return NULL_TREE;
4004 /* If EXP is signed, any overflow in the computation is undefined,
4005 so we don't worry about it so long as our computations on
4006 the bounds don't overflow. For unsigned, overflow is defined
4007 and this is exactly the right thing. */
4008 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4009 arg0_type, low, 0, arg1, 0);
4010 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4011 arg0_type, high, 1, arg1, 0);
4012 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4013 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4014 return NULL_TREE;
4016 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4017 *strict_overflow_p = true;
4019 normalize:
4020 /* Check for an unsigned range which has wrapped around the maximum
4021 value thus making n_high < n_low, and normalize it. */
4022 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4024 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4025 integer_one_node, 0);
4026 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4027 integer_one_node, 0);
4029 /* If the range is of the form +/- [ x+1, x ], we won't
4030 be able to normalize it. But then, it represents the
4031 whole range or the empty set, so make it
4032 +/- [ -, - ]. */
4033 if (tree_int_cst_equal (n_low, low)
4034 && tree_int_cst_equal (n_high, high))
4035 low = high = 0;
4036 else
4037 in_p = ! in_p;
4039 else
4040 low = n_low, high = n_high;
4042 *p_low = low;
4043 *p_high = high;
4044 *p_in_p = in_p;
4045 return arg0;
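      /* Worked example of the wrap-around normalization above, for
         unsigned 8-bit X: the range + [5, 250] on X + 10 first becomes
         [251, 240] for X (both bounds computed modulo 256).  Since
         240 < 251 the range has wrapped, so it is rewritten as the
         complemented range - [241, 250], i.e. the same set
         [0, 240] U [251, 255].  */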
4047 CASE_CONVERT:
4048 case NON_LVALUE_EXPR:
4049 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4050 return NULL_TREE;
4052 if (! INTEGRAL_TYPE_P (arg0_type)
4053 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4054 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4055 return NULL_TREE;
4057 n_low = low, n_high = high;
4059 if (n_low != 0)
4060 n_low = fold_convert_loc (loc, arg0_type, n_low);
4062 if (n_high != 0)
4063 n_high = fold_convert_loc (loc, arg0_type, n_high);
4065 /* If we're converting arg0 from an unsigned type to exp's
4066 signed type, we will be doing the comparison as unsigned.
4067 The tests above have already verified that LOW and HIGH
4068 are both positive.
4070 So we have to ensure that we will handle large unsigned
4071 values the same way that the current signed bounds treat
4072 negative values. */
4074 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4076 tree high_positive;
4077 tree equiv_type;
4078 /* For fixed-point modes, we need to pass the saturating flag
4079 as the 2nd parameter. */
4080 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4081 equiv_type
4082 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4083 TYPE_SATURATING (arg0_type));
4084 else
4085 equiv_type
4086 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4088 /* A range without an upper bound is, naturally, unbounded.
4089 Since convert would have cropped a very large value, use
4090 the max value for the destination type. */
4091 high_positive
4092 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4093 : TYPE_MAX_VALUE (arg0_type);
4095 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4096 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4097 fold_convert_loc (loc, arg0_type,
4098 high_positive),
4099 build_int_cst (arg0_type, 1));
4101 /* If the low bound is specified, "and" the range with the
4102 range for which the original unsigned value will be
4103 positive. */
4104 if (low != 0)
4106 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4107 1, fold_convert_loc (loc, arg0_type,
4108 integer_zero_node),
4109 high_positive))
4110 return NULL_TREE;
4112 in_p = (n_in_p == in_p);
4114 else
4116 /* Otherwise, "or" the range with the range of the input
4117 that will be interpreted as negative. */
4118 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4119 1, fold_convert_loc (loc, arg0_type,
4120 integer_zero_node),
4121 high_positive))
4122 return NULL_TREE;
4124 in_p = (in_p != n_in_p);
4128 *p_low = n_low;
4129 *p_high = n_high;
4130 *p_in_p = in_p;
4131 return arg0;
4133 default:
4134 return NULL_TREE;
4138 /* Given EXP, a logical expression, set the range it is testing into
4139 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4140 actually being tested. *PLOW and *PHIGH will be made of the same
4141 type as the returned expression. If EXP is not a comparison, we
4142 will most likely not be returning a useful value and range. Set
4143 *STRICT_OVERFLOW_P to true if the return value is only valid
4144 because signed overflow is undefined; otherwise, do not change
4145 *STRICT_OVERFLOW_P. */
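/* For example, for EXP = (X < 10) the loop below peels the comparison
   off in one make_range_step: the result is X with *PIN_P == 0 and
   range [10, -], i.e. "X is not in [10, max]", which is X < 10.  */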
4147 tree
4148 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4149 bool *strict_overflow_p)
4151 enum tree_code code;
4152 tree arg0, arg1 = NULL_TREE;
4153 tree exp_type, nexp;
4154 int in_p;
4155 tree low, high;
4156 location_t loc = EXPR_LOCATION (exp);
4158 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4159 and see if we can refine the range. Some of the cases below may not
4160 happen, but it doesn't seem worth worrying about this. We "continue"
4161 the outer loop when we've changed something; otherwise we "break"
4162 the switch, which will "break" the while. */
4164 in_p = 0;
4165 low = high = build_int_cst (TREE_TYPE (exp), 0);
4167 while (1)
4169 code = TREE_CODE (exp);
4170 exp_type = TREE_TYPE (exp);
4171 arg0 = NULL_TREE;
4173 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4175 if (TREE_OPERAND_LENGTH (exp) > 0)
4176 arg0 = TREE_OPERAND (exp, 0);
4177 if (TREE_CODE_CLASS (code) == tcc_binary
4178 || TREE_CODE_CLASS (code) == tcc_comparison
4179 || (TREE_CODE_CLASS (code) == tcc_expression
4180 && TREE_OPERAND_LENGTH (exp) > 1))
4181 arg1 = TREE_OPERAND (exp, 1);
4183 if (arg0 == NULL_TREE)
4184 break;
4186 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4187 &high, &in_p, strict_overflow_p);
4188 if (nexp == NULL_TREE)
4189 break;
4190 exp = nexp;
4193 /* If EXP is a constant, we can evaluate whether this is true or false. */
4194 if (TREE_CODE (exp) == INTEGER_CST)
4196 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4197 exp, 0, low, 0))
4198 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4199 exp, 1, high, 1)));
4200 low = high = 0;
4201 exp = 0;
4204 *pin_p = in_p, *plow = low, *phigh = high;
4205 return exp;
4208 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4209 type, TYPE, return an expression to test if EXP is in (or out of, depending
4210 on IN_P) the range. Return 0 if the test couldn't be created. */
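/* For instance, a request for EXP in [2, 5] (IN_P == 1) falls through
   the special cases below and is built, roughly, as the single
   comparison (unsigned) (EXP - 2) <= 3, via the recursive call on
   EXP - LOW against [0, HIGH - LOW].  */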
4212 tree
4213 build_range_check (location_t loc, tree type, tree exp, int in_p,
4214 tree low, tree high)
4216 tree etype = TREE_TYPE (exp), value;
4218 #ifdef HAVE_canonicalize_funcptr_for_compare
4219 /* Disable this optimization for function pointer expressions
4220 on targets that require function pointer canonicalization. */
4221 if (HAVE_canonicalize_funcptr_for_compare
4222 && TREE_CODE (etype) == POINTER_TYPE
4223 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4224 return NULL_TREE;
4225 #endif
4227 if (! in_p)
4229 value = build_range_check (loc, type, exp, 1, low, high);
4230 if (value != 0)
4231 return invert_truthvalue_loc (loc, value);
4233 return 0;
4236 if (low == 0 && high == 0)
4237 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4239 if (low == 0)
4240 return fold_build2_loc (loc, LE_EXPR, type, exp,
4241 fold_convert_loc (loc, etype, high));
4243 if (high == 0)
4244 return fold_build2_loc (loc, GE_EXPR, type, exp,
4245 fold_convert_loc (loc, etype, low));
4247 if (operand_equal_p (low, high, 0))
4248 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4249 fold_convert_loc (loc, etype, low));
4251 if (integer_zerop (low))
4253 if (! TYPE_UNSIGNED (etype))
4255 etype = unsigned_type_for (etype);
4256 high = fold_convert_loc (loc, etype, high);
4257 exp = fold_convert_loc (loc, etype, exp);
4259 return build_range_check (loc, type, exp, 1, 0, high);
4262 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4263 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265 unsigned HOST_WIDE_INT lo;
4266 HOST_WIDE_INT hi;
4267 int prec;
4269 prec = TYPE_PRECISION (etype);
4270 if (prec <= HOST_BITS_PER_WIDE_INT)
4272 hi = 0;
4273 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 else
4277 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4278 lo = (unsigned HOST_WIDE_INT) -1;
4281 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 if (TYPE_UNSIGNED (etype))
4285 tree signed_etype = signed_type_for (etype);
4286 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4287 etype
4288 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4289 else
4290 etype = signed_etype;
4291 exp = fold_convert_loc (loc, etype, exp);
4293 return fold_build2_loc (loc, GT_EXPR, type, exp,
4294 build_int_cst (etype, 0));
4298 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4299 This requires wrap-around arithmetic for the type of the expression.
4300 First make sure that arithmetic in this type is valid, then make sure
4301 that it wraps around. */
4302 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4303 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4304 TYPE_UNSIGNED (etype));
4306 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4308 tree utype, minv, maxv;
4310 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4311 for the type in question, as we rely on this here. */
4312 utype = unsigned_type_for (etype);
4313 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4314 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4315 integer_one_node, 1);
4316 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4318 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4319 minv, 1, maxv, 1)))
4320 etype = utype;
4321 else
4322 return 0;
4325 high = fold_convert_loc (loc, etype, high);
4326 low = fold_convert_loc (loc, etype, low);
4327 exp = fold_convert_loc (loc, etype, exp);
4329 value = const_binop (MINUS_EXPR, high, low);
4332 if (POINTER_TYPE_P (etype))
4334 if (value != 0 && !TREE_OVERFLOW (value))
4336 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4337 return build_range_check (loc, type,
4338 fold_build_pointer_plus_loc (loc, exp, low),
4339 1, build_int_cst (etype, 0), value);
4341 return 0;
4344 if (value != 0 && !TREE_OVERFLOW (value))
4345 return build_range_check (loc, type,
4346 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4347 1, build_int_cst (etype, 0), value);
4349 return 0;
4352 /* Return the predecessor of VAL in its type, handling the infinite case. */
4354 static tree
4355 range_predecessor (tree val)
4357 tree type = TREE_TYPE (val);
4359 if (INTEGRAL_TYPE_P (type)
4360 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4361 return 0;
4362 else
4363 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4366 /* Return the successor of VAL in its type, handling the infinite case. */
4368 static tree
4369 range_successor (tree val)
4371 tree type = TREE_TYPE (val);
4373 if (INTEGRAL_TYPE_P (type)
4374 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4375 return 0;
4376 else
4377 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4380 /* Given two ranges, see if we can merge them into one. Return 1 if we
4381 can, 0 if we can't. Set the output range into the specified parameters. */
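/* For example, intersecting + [2, 5] with + [4, 9] (both IN*_P set):
   the ranges overlap and neither subsumes the other, so the result
   runs from the start of the second to the end of the first,
   giving + [4, 5].  */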
4383 bool
4384 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4385 tree high0, int in1_p, tree low1, tree high1)
4387 int no_overlap;
4388 int subset;
4389 int temp;
4390 tree tem;
4391 int in_p;
4392 tree low, high;
4393 int lowequal = ((low0 == 0 && low1 == 0)
4394 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4395 low0, 0, low1, 0)));
4396 int highequal = ((high0 == 0 && high1 == 0)
4397 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 high0, 1, high1, 1)));
4400 /* Make range 0 be the range that starts first, or ends last if they
4401 start at the same value. Swap them if it isn't. */
4402 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4403 low0, 0, low1, 0))
4404 || (lowequal
4405 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4406 high1, 1, high0, 1))))
4408 temp = in0_p, in0_p = in1_p, in1_p = temp;
4409 tem = low0, low0 = low1, low1 = tem;
4410 tem = high0, high0 = high1, high1 = tem;
4413 /* Now flag two cases, whether the ranges are disjoint or whether the
4414 second range is totally subsumed in the first. Note that the tests
4415 below are simplified by the ones above. */
4416 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4417 high0, 1, low1, 0));
4418 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4419 high1, 1, high0, 1));
4421 /* We now have four cases, depending on whether we are including or
4422 excluding the two ranges. */
4423 if (in0_p && in1_p)
4425 /* If they don't overlap, the result is false. If the second range
4426 is a subset it is the result. Otherwise, the range is from the start
4427 of the second to the end of the first. */
4428 if (no_overlap)
4429 in_p = 0, low = high = 0;
4430 else if (subset)
4431 in_p = 1, low = low1, high = high1;
4432 else
4433 in_p = 1, low = low1, high = high0;
4436 else if (in0_p && ! in1_p)
4438 /* If they don't overlap, the result is the first range. If they are
4439 equal, the result is false. If the second range is a subset of the
4440 first, and the ranges begin at the same place, we go from just after
4441 the end of the second range to the end of the first. If the second
4442 range is not a subset of the first, or if it is a subset and both
4443 ranges end at the same place, the range starts at the start of the
4444 first range and ends just before the second range.
4445 Otherwise, we can't describe this as a single range. */
4446 if (no_overlap)
4447 in_p = 1, low = low0, high = high0;
4448 else if (lowequal && highequal)
4449 in_p = 0, low = high = 0;
4450 else if (subset && lowequal)
4452 low = range_successor (high1);
4453 high = high0;
4454 in_p = 1;
4455 if (low == 0)
4457 /* We are in the weird situation where high0 > high1 but
4458 high1 has no successor. Punt. */
4459 return 0;
4462 else if (! subset || highequal)
4464 low = low0;
4465 high = range_predecessor (low1);
4466 in_p = 1;
4467 if (high == 0)
4469 /* low0 < low1 but low1 has no predecessor. Punt. */
4470 return 0;
4473 else
4474 return 0;
4477 else if (! in0_p && in1_p)
4479 /* If they don't overlap, the result is the second range. If the second
4480 is a subset of the first, the result is false. Otherwise,
4481 the range starts just after the first range and ends at the
4482 end of the second. */
4483 if (no_overlap)
4484 in_p = 1, low = low1, high = high1;
4485 else if (subset || highequal)
4486 in_p = 0, low = high = 0;
4487 else
4489 low = range_successor (high0);
4490 high = high1;
4491 in_p = 1;
4492 if (low == 0)
4494 /* high1 > high0 but high0 has no successor. Punt. */
4495 return 0;
4500 else
4502 /* The case where we are excluding both ranges. Here the complex case
4503 is if they don't overlap. In that case, the only time we have a
4504 range is if they are adjacent. If the second is a subset of the
4505 first, the result is the first. Otherwise, the range to exclude
4506 starts at the beginning of the first range and ends at the end of the
4507 second. */
4508 if (no_overlap)
4510 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4511 range_successor (high0),
4512 1, low1, 0)))
4513 in_p = 0, low = low0, high = high1;
4514 else
4516 /* Canonicalize - [min, x] into - [-, x]. */
4517 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4518 switch (TREE_CODE (TREE_TYPE (low0)))
4520 case ENUMERAL_TYPE:
4521 if (TYPE_PRECISION (TREE_TYPE (low0))
4522 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4523 break;
4524 /* FALLTHROUGH */
4525 case INTEGER_TYPE:
4526 if (tree_int_cst_equal (low0,
4527 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4528 low0 = 0;
4529 break;
4530 case POINTER_TYPE:
4531 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4532 && integer_zerop (low0))
4533 low0 = 0;
4534 break;
4535 default:
4536 break;
4539 /* Canonicalize - [x, max] into - [x, -]. */
4540 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4541 switch (TREE_CODE (TREE_TYPE (high1)))
4543 case ENUMERAL_TYPE:
4544 if (TYPE_PRECISION (TREE_TYPE (high1))
4545 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4546 break;
4547 /* FALLTHROUGH */
4548 case INTEGER_TYPE:
4549 if (tree_int_cst_equal (high1,
4550 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4551 high1 = 0;
4552 break;
4553 case POINTER_TYPE:
4554 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4555 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4556 high1, 1,
4557 integer_one_node, 1)))
4558 high1 = 0;
4559 break;
4560 default:
4561 break;
4564 /* The ranges might also be adjacent between the maximum and
4565 minimum values of the given type. For
4566 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4567 return + [x + 1, y - 1]. */
4568 if (low0 == 0 && high1 == 0)
4570 low = range_successor (high0);
4571 high = range_predecessor (low1);
4572 if (low == 0 || high == 0)
4573 return 0;
4575 in_p = 1;
4577 else
4578 return 0;
4581 else if (subset)
4582 in_p = 0, low = low0, high = high0;
4583 else
4584 in_p = 0, low = low0, high = high1;
4587 *pin_p = in_p, *plow = low, *phigh = high;
4588 return 1;
4592 /* Subroutine of fold, looking inside expressions of the form
4593 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4594 of the COND_EXPR. This function is being used also to optimize
4595 A op B ? C : A, by reversing the comparison first.
4597 Return a folded expression whose code is not a COND_EXPR
4598 anymore, or NULL_TREE if no folding opportunity is found. */
4600 static tree
4601 fold_cond_expr_with_comparison (location_t loc, tree type,
4602 tree arg0, tree arg1, tree arg2)
4604 enum tree_code comp_code = TREE_CODE (arg0);
4605 tree arg00 = TREE_OPERAND (arg0, 0);
4606 tree arg01 = TREE_OPERAND (arg0, 1);
4607 tree arg1_type = TREE_TYPE (arg1);
4608 tree tem;
4610 STRIP_NOPS (arg1);
4611 STRIP_NOPS (arg2);
4613 /* If we have A op 0 ? A : -A, consider applying the following
4614 transformations:
4616 A == 0? A : -A same as -A
4617 A != 0? A : -A same as A
4618 A >= 0? A : -A same as abs (A)
4619 A > 0? A : -A same as abs (A)
4620 A <= 0? A : -A same as -abs (A)
4621 A < 0? A : -A same as -abs (A)
4623 None of these transformations work for modes with signed
4624 zeros. If A is +/-0, the first two transformations will
4625 change the sign of the result (from +0 to -0, or vice
4626 versa). The last four will fix the sign of the result,
4627 even though the original expressions could be positive or
4628 negative, depending on the sign of A.
4630 Note that all these transformations are correct if A is
4631 NaN, since the two alternatives (A and -A) are also NaNs. */
4632 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4633 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4634 ? real_zerop (arg01)
4635 : integer_zerop (arg01))
4636 && ((TREE_CODE (arg2) == NEGATE_EXPR
4637 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4638 /* In the case that A is of the form X-Y, '-A' (arg2) may
4639 have already been folded to Y-X, check for that. */
4640 || (TREE_CODE (arg1) == MINUS_EXPR
4641 && TREE_CODE (arg2) == MINUS_EXPR
4642 && operand_equal_p (TREE_OPERAND (arg1, 0),
4643 TREE_OPERAND (arg2, 1), 0)
4644 && operand_equal_p (TREE_OPERAND (arg1, 1),
4645 TREE_OPERAND (arg2, 0), 0))))
4646 switch (comp_code)
4648 case EQ_EXPR:
4649 case UNEQ_EXPR:
4650 tem = fold_convert_loc (loc, arg1_type, arg1);
4651 return pedantic_non_lvalue_loc (loc,
4652 fold_convert_loc (loc, type,
4653 negate_expr (tem)));
4654 case NE_EXPR:
4655 case LTGT_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4657 case UNGE_EXPR:
4658 case UNGT_EXPR:
4659 if (flag_trapping_math)
4660 break;
4661 /* Fall through. */
4662 case GE_EXPR:
4663 case GT_EXPR:
4664 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4665 arg1 = fold_convert_loc (loc, signed_type_for
4666 (TREE_TYPE (arg1)), arg1);
4667 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4668 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4669 case UNLE_EXPR:
4670 case UNLT_EXPR:
4671 if (flag_trapping_math)
4672 break;
4673 case LE_EXPR:
4674 case LT_EXPR:
4675 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4676 arg1 = fold_convert_loc (loc, signed_type_for
4677 (TREE_TYPE (arg1)), arg1);
4678 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4679 return negate_expr (fold_convert_loc (loc, type, tem));
4680 default:
4681 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4682 break;
4685 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4686 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4687 both transformations are correct when A is NaN: A != 0
4688 is then true, and A == 0 is false. */
4690 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4691 && integer_zerop (arg01) && integer_zerop (arg2))
4693 if (comp_code == NE_EXPR)
4694 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4695 else if (comp_code == EQ_EXPR)
4696 return build_int_cst (type, 0);
4699 /* Try some transformations of A op B ? A : B.
4701 A == B? A : B same as B
4702 A != B? A : B same as A
4703 A >= B? A : B same as max (A, B)
4704 A > B? A : B same as max (B, A)
4705 A <= B? A : B same as min (A, B)
4706 A < B? A : B same as min (B, A)
4708 As above, these transformations don't work in the presence
4709 of signed zeros. For example, if A and B are zeros of
4710 opposite sign, the first two transformations will change
4711 the sign of the result. In the last four, the original
4712 expressions give different results for (A=+0, B=-0) and
4713 (A=-0, B=+0), but the transformed expressions do not.
4715 The first two transformations are correct if either A or B
4716 is a NaN. In the first transformation, the condition will
4717 be false, and B will indeed be chosen. In the case of the
4718 second transformation, the condition A != B will be true,
4719 and A will be chosen.
4721 The conversions to max() and min() are not correct if B is
4722 a number and A is not. The conditions in the original
4723 expressions will be false, so all four give B. The min()
4724 and max() versions would give a NaN instead. */
4725 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4726 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4727 /* Avoid these transformations if the COND_EXPR may be used
4728 as an lvalue in the C++ front-end. PR c++/19199. */
4729 && (in_gimple_form
4730 || (strcmp (lang_hooks.name, "GNU C++") != 0
4731 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4732 || ! maybe_lvalue_p (arg1)
4733 || ! maybe_lvalue_p (arg2)))
4735 tree comp_op0 = arg00;
4736 tree comp_op1 = arg01;
4737 tree comp_type = TREE_TYPE (comp_op0);
4739 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4740 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4742 comp_type = type;
4743 comp_op0 = arg1;
4744 comp_op1 = arg2;
4747 switch (comp_code)
4749 case EQ_EXPR:
4750 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4751 case NE_EXPR:
4752 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4753 case LE_EXPR:
4754 case LT_EXPR:
4755 case UNLE_EXPR:
4756 case UNLT_EXPR:
4757 /* In C++ a ?: expression can be an lvalue, so put the
4758 operand which will be used if they are equal first
4759 so that we can convert this back to the
4760 corresponding COND_EXPR. */
4761 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4763 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4764 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4765 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4766 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4767 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4768 comp_op1, comp_op0);
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, tem));
4772 break;
4773 case GE_EXPR:
4774 case GT_EXPR:
4775 case UNGE_EXPR:
4776 case UNGT_EXPR:
4777 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4780 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4781 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4782 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4783 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4784 comp_op1, comp_op0);
4785 return pedantic_non_lvalue_loc (loc,
4786 fold_convert_loc (loc, type, tem));
4788 break;
4789 case UNEQ_EXPR:
4790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, arg2));
4793 break;
4794 case LTGT_EXPR:
4795 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, arg1));
4798 break;
4799 default:
4800 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4801 break;
4805 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4806 we might still be able to simplify this. For example,
4807 if C1 is one less or one more than C2, this might have started
4808 out as a MIN or MAX and been transformed by this function.
4809 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
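/* For instance, A < 5 ? A : 4 has C1 == C2 + 1, so the LT_EXPR case
   below rebuilds it as min (A, 4).  */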
4811 if (INTEGRAL_TYPE_P (type)
4812 && TREE_CODE (arg01) == INTEGER_CST
4813 && TREE_CODE (arg2) == INTEGER_CST)
4814 switch (comp_code)
4816 case EQ_EXPR:
4817 if (TREE_CODE (arg1) == INTEGER_CST)
4818 break;
4819 /* We can replace A with C1 in this case. */
4820 arg1 = fold_convert_loc (loc, type, arg01);
4821 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4823 case LT_EXPR:
4824 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4825 MIN_EXPR, to preserve the signedness of the comparison. */
4826 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4827 OEP_ONLY_CONST)
4828 && operand_equal_p (arg01,
4829 const_binop (PLUS_EXPR, arg2,
4830 build_int_cst (type, 1)),
4831 OEP_ONLY_CONST))
4833 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4834 fold_convert_loc (loc, TREE_TYPE (arg00),
4835 arg2));
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4839 break;
4841 case LE_EXPR:
4842 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4843 as above. */
4844 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4845 OEP_ONLY_CONST)
4846 && operand_equal_p (arg01,
4847 const_binop (MINUS_EXPR, arg2,
4848 build_int_cst (type, 1)),
4849 OEP_ONLY_CONST))
4851 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 fold_convert_loc (loc, TREE_TYPE (arg00),
4853 arg2));
4854 return pedantic_non_lvalue_loc (loc,
4855 fold_convert_loc (loc, type, tem));
4857 break;
4859 case GT_EXPR:
4860 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4861 MAX_EXPR, to preserve the signedness of the comparison. */
4862 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4863 OEP_ONLY_CONST)
4864 && operand_equal_p (arg01,
4865 const_binop (MINUS_EXPR, arg2,
4866 build_int_cst (type, 1)),
4867 OEP_ONLY_CONST))
4869 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4870 fold_convert_loc (loc, TREE_TYPE (arg00),
4871 arg2));
4872 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4874 break;
4876 case GE_EXPR:
4877 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4878 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4879 OEP_ONLY_CONST)
4880 && operand_equal_p (arg01,
4881 const_binop (PLUS_EXPR, arg2,
4882 build_int_cst (type, 1)),
4883 OEP_ONLY_CONST))
4885 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4886 fold_convert_loc (loc, TREE_TYPE (arg00),
4887 arg2));
4888 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4890 break;
4891 case NE_EXPR:
4892 break;
4893 default:
4894 gcc_unreachable ();
4897 return NULL_TREE;
4902 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4903 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4904 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4905 false) >= 2)
4906 #endif
4908 /* EXP is some logical combination of boolean tests. See if we can
4909 merge it into some range test. Return the new tree if so. */
4911 static tree
4912 fold_range_test (location_t loc, enum tree_code code, tree type,
4913 tree op0, tree op1)
4915 int or_op = (code == TRUTH_ORIF_EXPR
4916 || code == TRUTH_OR_EXPR);
4917 int in0_p, in1_p, in_p;
4918 tree low0, low1, low, high0, high1, high;
4919 bool strict_overflow_p = false;
4920 tree tem, lhs, rhs;
4921 const char * const warnmsg = G_("assuming signed overflow does not occur "
4922 "when simplifying range test");
4924 if (!INTEGRAL_TYPE_P (type))
4925 return 0;
4927 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4928 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4930 /* If this is an OR operation, invert both sides; we will invert
4931 again at the end. */
4932 if (or_op)
4933 in0_p = ! in0_p, in1_p = ! in1_p;
4935 /* If both expressions are the same, if we can merge the ranges, and we
4936 can build the range test, return it or it inverted. If one of the
4937 ranges is always true or always false, consider it to be the same
4938 expression as the other. */
4939 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4940 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4941 in1_p, low1, high1)
4942 && 0 != (tem = (build_range_check (loc, type,
4943 lhs != 0 ? lhs
4944 : rhs != 0 ? rhs : integer_zero_node,
4945 in_p, low, high))))
4947 if (strict_overflow_p)
4948 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4949 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4952 /* On machines where the branch cost is expensive, if this is a
4953 short-circuited branch and the underlying object on both sides
4954 is the same, make a non-short-circuit operation. */
4955 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4956 && lhs != 0 && rhs != 0
4957 && (code == TRUTH_ANDIF_EXPR
4958 || code == TRUTH_ORIF_EXPR)
4959 && operand_equal_p (lhs, rhs, 0))
4961 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4962 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4963 which cases we can't do this. */
4964 if (simple_operand_p (lhs))
4965 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4966 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4967 type, op0, op1);
4969 else if (!lang_hooks.decls.global_bindings_p ()
4970 && !CONTAINS_PLACEHOLDER_P (lhs))
4972 tree common = save_expr (lhs);
4974 if (0 != (lhs = build_range_check (loc, type, common,
4975 or_op ? ! in0_p : in0_p,
4976 low0, high0))
4977 && (0 != (rhs = build_range_check (loc, type, common,
4978 or_op ? ! in1_p : in1_p,
4979 low1, high1))))
4981 if (strict_overflow_p)
4982 fold_overflow_warning (warnmsg,
4983 WARN_STRICT_OVERFLOW_COMPARISON);
4984 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4985 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4986 type, lhs, rhs);
4991 return 0;
4994 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4995 bit value. Arrange things so the extra bits will be set to zero if and
4996 only if C is sign-extended to its full width. If MASK is nonzero,
4997 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4999 static tree
5000 unextend (tree c, int p, int unsignedp, tree mask)
5002 tree type = TREE_TYPE (c);
5003 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5004 tree temp;
5006 if (p == modesize || unsignedp)
5007 return c;
5009 /* We work by getting just the sign bit into the low-order bit, then
5010 into the high-order bit, then sign-extend. We then XOR that value
5011 with C. */
5012 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5013 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5015 /* We must use a signed type in order to get an arithmetic right shift.
5016 However, we must also avoid introducing accidental overflows, so that
5017 a subsequent call to integer_zerop will work. Hence we must
5018 do the type conversion here. At this point, the constant is either
5019 zero or one, and the conversion to a signed type can never overflow.
5020 We could get an overflow if this conversion is done anywhere else. */
5021 if (TYPE_UNSIGNED (type))
5022 temp = fold_convert (signed_type_for (type), temp);
5024 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5025 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5026 if (mask != 0)
5027 temp = const_binop (BIT_AND_EXPR, temp,
5028 fold_convert (TREE_TYPE (c), mask));
5029 /* If necessary, convert the type back to match the type of C. */
5030 if (TYPE_UNSIGNED (type))
5031 temp = fold_convert (type, temp);
5033 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5036 /* For an expression that has the form
5037 (A && B) || ~B
5038 or
5039 (A || B) && ~B,
5040 we can drop one of the inner expressions and simplify to
5041 A || ~B
5042 or
5043 A && ~B
5044 LOC is the location of the resulting expression. OP is the inner
5045 logical operation; the left-hand side in the examples above, while CMPOP
5046 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5047 removing a condition that guards another, as in
5048 (A != NULL && A->...) || A == NULL
5049 which we must not transform. If RHS_ONLY is true, only eliminate the
5050 right-most operand of the inner logical operation. */
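/* A concrete instance: in (X > 0 && Y == 3) || Y != 3, the comparison
   Y == 3 inside the inner AND is the inverse of the outer CMPOP
   Y != 3, so it can be dropped, giving X > 0 || Y != 3.  */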
5052 static tree
5053 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5054 bool rhs_only)
5056 tree type = TREE_TYPE (cmpop);
5057 enum tree_code code = TREE_CODE (cmpop);
5058 enum tree_code truthop_code = TREE_CODE (op);
5059 tree lhs = TREE_OPERAND (op, 0);
5060 tree rhs = TREE_OPERAND (op, 1);
5061 tree orig_lhs = lhs, orig_rhs = rhs;
5062 enum tree_code rhs_code = TREE_CODE (rhs);
5063 enum tree_code lhs_code = TREE_CODE (lhs);
5064 enum tree_code inv_code;
5066 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5067 return NULL_TREE;
5069 if (TREE_CODE_CLASS (code) != tcc_comparison)
5070 return NULL_TREE;
5072 if (rhs_code == truthop_code)
5074 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5075 if (newrhs != NULL_TREE)
5077 rhs = newrhs;
5078 rhs_code = TREE_CODE (rhs);
5081 if (lhs_code == truthop_code && !rhs_only)
5083 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5084 if (newlhs != NULL_TREE)
5086 lhs = newlhs;
5087 lhs_code = TREE_CODE (lhs);
5091 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5092 if (inv_code == rhs_code
5093 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5094 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 return lhs;
5096 if (!rhs_only && inv_code == lhs_code
5097 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5098 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5099 return rhs;
5100 if (rhs != orig_rhs || lhs != orig_lhs)
5101 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5102 lhs, rhs);
5103 return NULL_TREE;
5106 /* Find ways of folding logical expressions of LHS and RHS:
5107 Try to merge two comparisons to the same innermost item.
5108 Look for range tests like "ch >= '0' && ch <= '9'".
5109 Look for combinations of simple terms on machines with expensive branches
5110 and evaluate the RHS unconditionally.
5112 For example, if we have p->a == 2 && p->b == 4 and we can make an
5113 object large enough to span both A and B, we can do this with a comparison
5114 against the object ANDed with the a mask.
5116 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5117 operations to do this with one comparison.
5119 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5120 function and the one above.
5122 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5123 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5125 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5126 two operands.
5128 We return the simplified tree or 0 if no optimization is possible. */
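/* As a rough sketch of the p->a == 2 && p->b == 4 case: if A and B are
   adjacent 8-bit bitfields, the two tests can become one 16-bit load
   masked and compared against a merged constant with 2 and 4 packed
   into their respective bytes (which byte holds which value depends on
   BYTES_BIG_ENDIAN).  */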
5130 static tree
5131 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5132 tree lhs, tree rhs)
5134 /* If this is the "or" of two comparisons, we can do something if
5135 the comparisons are NE_EXPR. If this is the "and", we can do something
5136 if the comparisons are EQ_EXPR. I.e.,
5137 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5139 WANTED_CODE is this operation code. For single bit fields, we can
5140 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5141 comparison for one-bit fields. */
5143 enum tree_code wanted_code;
5144 enum tree_code lcode, rcode;
5145 tree ll_arg, lr_arg, rl_arg, rr_arg;
5146 tree ll_inner, lr_inner, rl_inner, rr_inner;
5147 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5148 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5149 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5150 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5151 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5152 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5153 enum machine_mode lnmode, rnmode;
5154 tree ll_mask, lr_mask, rl_mask, rr_mask;
5155 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5156 tree l_const, r_const;
5157 tree lntype, rntype, result;
5158 HOST_WIDE_INT first_bit, end_bit;
5159 int volatilep;
5161 /* Start by getting the comparison codes. Fail if anything is volatile.
5162 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5163 it were surrounded with a NE_EXPR. */
5165 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5166 return 0;
5168 lcode = TREE_CODE (lhs);
5169 rcode = TREE_CODE (rhs);
5171 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5173 lhs = build2 (NE_EXPR, truth_type, lhs,
5174 build_int_cst (TREE_TYPE (lhs), 0));
5175 lcode = NE_EXPR;
5178 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5180 rhs = build2 (NE_EXPR, truth_type, rhs,
5181 build_int_cst (TREE_TYPE (rhs), 0));
5182 rcode = NE_EXPR;
5185 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5186 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5187 return 0;
5189 ll_arg = TREE_OPERAND (lhs, 0);
5190 lr_arg = TREE_OPERAND (lhs, 1);
5191 rl_arg = TREE_OPERAND (rhs, 0);
5192 rr_arg = TREE_OPERAND (rhs, 1);
5194 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5195 if (simple_operand_p (ll_arg)
5196 && simple_operand_p (lr_arg))
5198 if (operand_equal_p (ll_arg, rl_arg, 0)
5199 && operand_equal_p (lr_arg, rr_arg, 0))
5201 result = combine_comparisons (loc, code, lcode, rcode,
5202 truth_type, ll_arg, lr_arg);
5203 if (result)
5204 return result;
5206 else if (operand_equal_p (ll_arg, rr_arg, 0)
5207 && operand_equal_p (lr_arg, rl_arg, 0))
5209 result = combine_comparisons (loc, code, lcode,
5210 swap_tree_comparison (rcode),
5211 truth_type, ll_arg, lr_arg);
5212 if (result)
5213 return result;
5217 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5218 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5220 /* If the RHS can be evaluated unconditionally and its operands are
5221 simple, it wins to evaluate the RHS unconditionally on machines
5222 with expensive branches. In this case, this isn't a comparison
5223 that can be merged. */
5225 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5226 false) >= 2
5227 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5228 && simple_operand_p (rl_arg)
5229 && simple_operand_p (rr_arg))
5231 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5232 if (code == TRUTH_OR_EXPR
5233 && lcode == NE_EXPR && integer_zerop (lr_arg)
5234 && rcode == NE_EXPR && integer_zerop (rr_arg)
5235 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5236 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5237 return build2_loc (loc, NE_EXPR, truth_type,
5238 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5239 ll_arg, rl_arg),
5240 build_int_cst (TREE_TYPE (ll_arg), 0));
5242 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5243 if (code == TRUTH_AND_EXPR
5244 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5245 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5246 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5247 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5248 return build2_loc (loc, EQ_EXPR, truth_type,
5249 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5250 ll_arg, rl_arg),
5251 build_int_cst (TREE_TYPE (ll_arg), 0));
5254 /* See if the comparisons can be merged. Then get all the parameters for
5255 each side. */
5257 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5258 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5259 return 0;
5261 volatilep = 0;
5262 ll_inner = decode_field_reference (loc, ll_arg,
5263 &ll_bitsize, &ll_bitpos, &ll_mode,
5264 &ll_unsignedp, &volatilep, &ll_mask,
5265 &ll_and_mask);
5266 lr_inner = decode_field_reference (loc, lr_arg,
5267 &lr_bitsize, &lr_bitpos, &lr_mode,
5268 &lr_unsignedp, &volatilep, &lr_mask,
5269 &lr_and_mask);
5270 rl_inner = decode_field_reference (loc, rl_arg,
5271 &rl_bitsize, &rl_bitpos, &rl_mode,
5272 &rl_unsignedp, &volatilep, &rl_mask,
5273 &rl_and_mask);
5274 rr_inner = decode_field_reference (loc, rr_arg,
5275 &rr_bitsize, &rr_bitpos, &rr_mode,
5276 &rr_unsignedp, &volatilep, &rr_mask,
5277 &rr_and_mask);
5279 /* The inner operation on the lhs of each comparison must be the
5280 same if we are to be able to do anything.
5281 Then see if we have constants. If not, the same must be true for
5282 the rhs's. */
5283 if (volatilep || ll_inner == 0 || rl_inner == 0
5284 || ! operand_equal_p (ll_inner, rl_inner, 0))
5285 return 0;
5287 if (TREE_CODE (lr_arg) == INTEGER_CST
5288 && TREE_CODE (rr_arg) == INTEGER_CST)
5289 l_const = lr_arg, r_const = rr_arg;
5290 else if (lr_inner == 0 || rr_inner == 0
5291 || ! operand_equal_p (lr_inner, rr_inner, 0))
5292 return 0;
5293 else
5294 l_const = r_const = 0;
5296 /* If either comparison code is not correct for our logical operation,
5297 fail. However, we can convert a one-bit comparison against zero into
5298 the opposite comparison against that bit being set in the field. */
5300 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5301 if (lcode != wanted_code)
5303 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5305 /* Make the left operand unsigned, since we are only interested
5306 in the value of one bit. Otherwise we are doing the wrong
5307 thing below. */
5308 ll_unsignedp = 1;
5309 l_const = ll_mask;
5311 else
5312 return 0;
5315 /* This is analogous to the code for l_const above. */
5316 if (rcode != wanted_code)
5318 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5320 rl_unsignedp = 1;
5321 r_const = rl_mask;
5323 else
5324 return 0;
5327 /* See if we can find a mode that contains both fields being compared on
5328 the left. If we can't, fail. Otherwise, update all constants and masks
5329 to be relative to a field of that size. */
5330 first_bit = MIN (ll_bitpos, rl_bitpos);
5331 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5332 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5333 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5334 volatilep);
5335 if (lnmode == VOIDmode)
5336 return 0;
5338 lnbitsize = GET_MODE_BITSIZE (lnmode);
5339 lnbitpos = first_bit & ~ (lnbitsize - 1);
5340 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5341 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5343 if (BYTES_BIG_ENDIAN)
5345 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5346 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5349 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5350 size_int (xll_bitpos));
5351 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5352 size_int (xrl_bitpos));
5354 if (l_const)
5356 l_const = fold_convert_loc (loc, lntype, l_const);
5357 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5358 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5359 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5360 fold_build1_loc (loc, BIT_NOT_EXPR,
5361 lntype, ll_mask))))
5363 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5365 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5368 if (r_const)
5370 r_const = fold_convert_loc (loc, lntype, r_const);
5371 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5372 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5373 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5374 fold_build1_loc (loc, BIT_NOT_EXPR,
5375 lntype, rl_mask))))
5377 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5379 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5383 /* If the right sides are not constant, do the same for them. Also,
5384 disallow this optimization if a size or signedness mismatch occurs
5385 between the left and right sides. */
5386 if (l_const == 0)
5388 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5389 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5390 /* Make sure the two fields on the right
5391 correspond to the left without being swapped. */
5392 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5393 return 0;
5395 first_bit = MIN (lr_bitpos, rr_bitpos);
5396 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5397 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5398 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5399 volatilep);
5400 if (rnmode == VOIDmode)
5401 return 0;
5403 rnbitsize = GET_MODE_BITSIZE (rnmode);
5404 rnbitpos = first_bit & ~ (rnbitsize - 1);
5405 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5406 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5408 if (BYTES_BIG_ENDIAN)
5410 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5411 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5414 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 rntype, lr_mask),
5416 size_int (xlr_bitpos));
5417 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 rntype, rr_mask),
5419 size_int (xrr_bitpos));
5421 /* Make a mask that corresponds to both fields being compared.
5422 Do this for both items being compared. If the operands are the
5423 same size and the bits being compared are in the same position
5424 then we can do this by masking both and comparing the masked
5425 results. */
5426 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5427 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5428 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5430 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5431 ll_unsignedp || rl_unsignedp);
5432 if (! all_ones_mask_p (ll_mask, lnbitsize))
5433 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5435 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5436 lr_unsignedp || rr_unsignedp);
5437 if (! all_ones_mask_p (lr_mask, rnbitsize))
5438 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5440 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5443 /* There is still another way we can do something: If both pairs of
5444 fields being compared are adjacent, we may be able to make a wider
5445 field containing them both.
5447 Note that we still must mask the lhs/rhs expressions. Furthermore,
5448 the mask must be shifted to account for the shift done by
5449 make_bit_field_ref. */
5450 if ((ll_bitsize + ll_bitpos == rl_bitpos
5451 && lr_bitsize + lr_bitpos == rr_bitpos)
5452 || (ll_bitpos == rl_bitpos + rl_bitsize
5453 && lr_bitpos == rr_bitpos + rr_bitsize))
5455 tree type;
5457 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5458 ll_bitsize + rl_bitsize,
5459 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5460 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5461 lr_bitsize + rr_bitsize,
5462 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5464 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5465 size_int (MIN (xll_bitpos, xrl_bitpos)));
5466 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5467 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5469 /* Convert to the smaller type before masking out unwanted bits. */
5470 type = lntype;
5471 if (lntype != rntype)
5473 if (lnbitsize > rnbitsize)
5475 lhs = fold_convert_loc (loc, rntype, lhs);
5476 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5477 type = rntype;
5479 else if (lnbitsize < rnbitsize)
5481 rhs = fold_convert_loc (loc, lntype, rhs);
5482 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5483 type = lntype;
5487 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5488 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5490 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5491 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5493 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5496 return 0;
5499 /* Handle the case of comparisons with constants. If there is something in
5500 common between the masks, those bits of the constants must be the same.
5501 If not, the condition is always false. Test for this to avoid generating
5502 incorrect code below. */
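   /* For example, (x & 3) == 1 && (x & 1) == 0 shares the low bit
      between the two masks, but the two constants disagree on that
      bit, so the conjunction is always false.  */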
5503 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5504 if (! integer_zerop (result)
5505 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5506 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5508 if (wanted_code == NE_EXPR)
5510 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5511 return constant_boolean_node (true, truth_type);
5513 else
5515 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5516 return constant_boolean_node (false, truth_type);
5520 /* Construct the expression we will return. First get the component
5521 reference we will make. Unless the mask is all ones the width of
5522 that field, perform the mask operation. Then compare with the
5523 merged constant. */
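   /* For instance, (x & 0xf0) == 0x10 && (x & 0x0f) == 0x02 collapses
      to the single test (x & 0xff) == 0x12: the masks are IORed
      together, and so are the two constants.  */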
5524 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5525 ll_unsignedp || rl_unsignedp);
5527 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5528 if (! all_ones_mask_p (ll_mask, lnbitsize))
5529 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5531 return build2_loc (loc, wanted_code, truth_type, result,
5532 const_binop (BIT_IOR_EXPR, l_const, r_const));
5535 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5536 constant. */
5538 static tree
5539 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5540 tree op0, tree op1)
5542 tree arg0 = op0;
5543 enum tree_code op_code;
5544 tree comp_const;
5545 tree minmax_const;
5546 int consts_equal, consts_lt;
5547 tree inner;
5549 STRIP_SIGN_NOPS (arg0);
5551 op_code = TREE_CODE (arg0);
5552 minmax_const = TREE_OPERAND (arg0, 1);
5553 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5554 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5555 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5556 inner = TREE_OPERAND (arg0, 0);
5558 /* If something does not permit us to optimize, return NULL_TREE. */
5559 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5560 || TREE_CODE (comp_const) != INTEGER_CST
5561 || TREE_OVERFLOW (comp_const)
5562 || TREE_CODE (minmax_const) != INTEGER_CST
5563 || TREE_OVERFLOW (minmax_const))
5564 return NULL_TREE;
5566 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5567 and GT_EXPR, doing the rest with recursive calls using logical
5568 simplifications. */
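  /* For instance, a GE comparison is decomposed below into EQ || GT,
     while NE, LT and LE fold the inverted comparison and then invert
     the result.  */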
5569 switch (code)
5571 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5573 tree tem
5574 = optimize_minmax_comparison (loc,
5575 invert_tree_comparison (code, false),
5576 type, op0, op1);
5577 if (tem)
5578 return invert_truthvalue_loc (loc, tem);
5579 return NULL_TREE;
5582 case GE_EXPR:
5583 return
5584 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5585 optimize_minmax_comparison
5586 (loc, EQ_EXPR, type, arg0, comp_const),
5587 optimize_minmax_comparison
5588 (loc, GT_EXPR, type, arg0, comp_const));
5590 case EQ_EXPR:
5591 if (op_code == MAX_EXPR && consts_equal)
5592 /* MAX (X, 0) == 0 -> X <= 0 */
5593 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5595 else if (op_code == MAX_EXPR && consts_lt)
5596 /* MAX (X, 0) == 5 -> X == 5 */
5597 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5599 else if (op_code == MAX_EXPR)
5600 /* MAX (X, 0) == -1 -> false */
5601 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5603 else if (consts_equal)
5604 /* MIN (X, 0) == 0 -> X >= 0 */
5605 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5607 else if (consts_lt)
5608 /* MIN (X, 0) == 5 -> false */
5609 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5611 else
5612 /* MIN (X, 0) == -1 -> X == -1 */
5613 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5615 case GT_EXPR:
5616 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5617 /* MAX (X, 0) > 0 -> X > 0
5618 MAX (X, 0) > 5 -> X > 5 */
5619 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5621 else if (op_code == MAX_EXPR)
5622 /* MAX (X, 0) > -1 -> true */
5623 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5625 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5626 /* MIN (X, 0) > 0 -> false
5627 MIN (X, 0) > 5 -> false */
5628 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5630 else
5631 /* MIN (X, 0) > -1 -> X > -1 */
5632 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5634 default:
5635 return NULL_TREE;
5639 /* T is an integer expression that is being multiplied, divided, or taken a
5640 modulus (CODE says which and what kind of divide or modulus) by a
5641 constant C. See if we can eliminate that operation by folding it with
5642 other operations already in T. WIDE_TYPE, if non-null, is a type that
5643 should be used for the computation if wider than our type.
5645 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5646 (X * 2) + (Y * 4). We must, however, be assured that either the original
5647 expression would not overflow or that overflow is undefined for the type
5648 in the language in question.
5650 If we return a non-null expression, it is an equivalent form of the
5651 original computation, but need not be in the original type.
5653 We set *STRICT_OVERFLOW_P to true if the return value depends on
5654 signed overflow being undefined. Otherwise we do not change
5655 *STRICT_OVERFLOW_P. */
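/* As an illustration of the overflow caveat: rewriting (X * 8) / 4 as
   X * 2 for signed X is an equivalence only because signed overflow is
   treated as undefined; if X * 8 could wrap, the two forms could
   differ, which is why such folds set *STRICT_OVERFLOW_P.  */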
5657 static tree
5658 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5659 bool *strict_overflow_p)
5661 /* To avoid exponential search depth, refuse to allow recursion past
5662 three levels. Beyond that (1) it's highly unlikely that we'll find
5663 something interesting and (2) we've probably processed it before
5664 when we built the inner expression. */
5666 static int depth;
5667 tree ret;
5669 if (depth > 3)
5670 return NULL;
5672 depth++;
5673 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5674 depth--;
5676 return ret;
5679 static tree
5680 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5681 bool *strict_overflow_p)
5683 tree type = TREE_TYPE (t);
5684 enum tree_code tcode = TREE_CODE (t);
5685 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5686 > GET_MODE_SIZE (TYPE_MODE (type)))
5687 ? wide_type : type);
5688 tree t1, t2;
5689 int same_p = tcode == code;
5690 tree op0 = NULL_TREE, op1 = NULL_TREE;
5691 bool sub_strict_overflow_p;
5693 /* Don't deal with constants of zero here; they confuse the code below. */
5694 if (integer_zerop (c))
5695 return NULL_TREE;
5697 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5698 op0 = TREE_OPERAND (t, 0);
5700 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5701 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5703 /* Note that we need not handle conditional operations here since fold
5704 already handles those cases. So just do arithmetic here. */
5705 switch (tcode)
5707 case INTEGER_CST:
5708 /* For a constant, we can always simplify if we are a multiply
5709 or (for divide and modulus) if it is a multiple of our constant. */
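      /* E.g. the constant 12 divided by c == 4 folds to 3, while
         c == 5 is rejected because 12 % 5 != 0.  */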
5710 if (code == MULT_EXPR
5711 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5712 return const_binop (code, fold_convert (ctype, t),
5713 fold_convert (ctype, c));
5714 break;
5716 CASE_CONVERT: case NON_LVALUE_EXPR:
5717 /* If op0 is an expression ... */
5718 if ((COMPARISON_CLASS_P (op0)
5719 || UNARY_CLASS_P (op0)
5720 || BINARY_CLASS_P (op0)
5721 || VL_EXP_CLASS_P (op0)
5722 || EXPRESSION_CLASS_P (op0))
5723 /* ... and has wrapping overflow, and its type is smaller
5724 than ctype, then we cannot pass through as widening. */
5725 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5726 && (TYPE_PRECISION (ctype)
5727 > TYPE_PRECISION (TREE_TYPE (op0))))
5728 /* ... or this is a truncation (t is narrower than op0),
5729 then we cannot pass through this narrowing. */
5730 || (TYPE_PRECISION (type)
5731 < TYPE_PRECISION (TREE_TYPE (op0)))
5732 /* ... or signedness changes for division or modulus,
5733 then we cannot pass through this conversion. */
5734 || (code != MULT_EXPR
5735 && (TYPE_UNSIGNED (ctype)
5736 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5737 /* ... or has undefined overflow while the converted to
5738 type has not, we cannot do the operation in the inner type
5739 as that would introduce undefined overflow. */
5740 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5741 && !TYPE_OVERFLOW_UNDEFINED (type))))
5742 break;
5744 /* Pass the constant down and see if we can make a simplification. If
5745 we can, replace this expression with the inner simplification for
5746 possible later conversion to our or some other type. */
5747 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5748 && TREE_CODE (t2) == INTEGER_CST
5749 && !TREE_OVERFLOW (t2)
5750 && (0 != (t1 = extract_muldiv (op0, t2, code,
5751 code == MULT_EXPR
5752 ? ctype : NULL_TREE,
5753 strict_overflow_p))))
5754 return t1;
5755 break;
5757 case ABS_EXPR:
5758 /* If widening the type changes it from signed to unsigned, then we
5759 must avoid building ABS_EXPR itself as unsigned. */
5760 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5762 tree cstype = (*signed_type_for) (ctype);
5763 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5764 != 0)
5766 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5767 return fold_convert (ctype, t1);
5769 break;
5771 /* If the constant is negative, we cannot simplify this. */
5772 if (tree_int_cst_sgn (c) == -1)
5773 break;
5774 /* FALLTHROUGH */
5775 case NEGATE_EXPR:
5776 /* For division and modulus, type can't be unsigned, as e.g.
5777 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5778 For signed types, even with wrapping overflow, this is fine. */
5779 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5780 break;
5781 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5782 != 0)
5783 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5784 break;
5786 case MIN_EXPR: case MAX_EXPR:
5787 /* If widening the type changes the signedness, then we can't perform
5788 this optimization as that changes the result. */
5789 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5790 break;
5792 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5793 sub_strict_overflow_p = false;
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5795 &sub_strict_overflow_p)) != 0
5796 && (t2 = extract_muldiv (op1, c, code, wide_type,
5797 &sub_strict_overflow_p)) != 0)
5799 if (tree_int_cst_sgn (c) < 0)
5800 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5801 if (sub_strict_overflow_p)
5802 *strict_overflow_p = true;
5803 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5804 fold_convert (ctype, t2));
5806 break;
5808 case LSHIFT_EXPR: case RSHIFT_EXPR:
5809 /* If the second operand is constant, this is a multiplication
5810 or floor division by a power of two, so we can treat it that
5811 way unless the multiplier or divisor overflows. Signed
5812 left-shift overflow is implementation-defined rather than
5813 undefined in C90, so do not convert signed left shift into
5814 multiplication. */
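      /* For example, an unsigned x << 3 is handled as x * 8 and
         x >> 2 as x / 4 with floor semantics, so the multiply/divide
         logic in this function applies to shifts as well.  */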
5815 if (TREE_CODE (op1) == INTEGER_CST
5816 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5817 /* const_binop may not detect overflow correctly,
5818 so check for it explicitly here. */
5819 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5820 && TREE_INT_CST_HIGH (op1) == 0
5821 && 0 != (t1 = fold_convert (ctype,
5822 const_binop (LSHIFT_EXPR,
5823 size_one_node,
5824 op1)))
5825 && !TREE_OVERFLOW (t1))
5826 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5827 ? MULT_EXPR : FLOOR_DIV_EXPR,
5828 ctype,
5829 fold_convert (ctype, op0),
5830 t1),
5831 c, code, wide_type, strict_overflow_p);
5832 break;
5834 case PLUS_EXPR: case MINUS_EXPR:
5835 /* See if we can eliminate the operation on both sides. If we can, we
5836 can return a new PLUS or MINUS. If we can't, the only remaining
5837 cases where we can do anything are if the second operand is a
5838 constant. */
5839 sub_strict_overflow_p = false;
5840 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5841 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5842 if (t1 != 0 && t2 != 0
5843 && (code == MULT_EXPR
5844 /* If not multiplication, we can only do this if both operands
5845 are divisible by c. */
5846 || (multiple_of_p (ctype, op0, c)
5847 && multiple_of_p (ctype, op1, c))))
5849 if (sub_strict_overflow_p)
5850 *strict_overflow_p = true;
5851 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5852 fold_convert (ctype, t2));
5855 /* If this was a subtraction, negate OP1 and set it to be an addition.
5856 This simplifies the logic below. */
5857 if (tcode == MINUS_EXPR)
5859 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5860 /* If OP1 was not easily negatable, the constant may be OP0. */
5861 if (TREE_CODE (op0) == INTEGER_CST)
5863 tree tem = op0;
5864 op0 = op1;
5865 op1 = tem;
5866 tem = t1;
5867 t1 = t2;
5868 t2 = tem;
5872 if (TREE_CODE (op1) != INTEGER_CST)
5873 break;
5875 /* If either OP1 or C is negative, this optimization is not safe for
5876 some of the division and remainder types, while for others we need
5877 to change the code. */
5878 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5880 if (code == CEIL_DIV_EXPR)
5881 code = FLOOR_DIV_EXPR;
5882 else if (code == FLOOR_DIV_EXPR)
5883 code = CEIL_DIV_EXPR;
5884 else if (code != MULT_EXPR
5885 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5886 break;
5889 /* If it's a multiply or a division/modulus operation of a multiple
5890 of our constant, do the operation and verify it doesn't overflow. */
5891 if (code == MULT_EXPR
5892 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5894 op1 = const_binop (code, fold_convert (ctype, op1),
5895 fold_convert (ctype, c));
5896 /* We allow the constant to overflow with wrapping semantics. */
5897 if (op1 == 0
5898 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5899 break;
5901 else
5902 break;
5904 /* If we have an unsigned type, we cannot widen the operation since it
5905 will change the result if the original computation overflowed. */
5906 if (TYPE_UNSIGNED (ctype) && ctype != type)
5907 break;
5909 /* If we were able to eliminate our operation from the first side,
5910 apply our operation to the second side and reform the PLUS. */
5911 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5912 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5914 /* The last case is if we are a multiply. In that case, we can
5915 apply the distributive law to commute the multiply and addition
5916 if the multiplication of the constants doesn't overflow
5917 and overflow is defined. With undefined overflow
5918 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
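      /* Concretely, (X + 3) * 4 becomes X * 4 + 12 here, which is
         safe only when the type wraps: with undefined overflow,
         X * 4 might overflow even though (X + 3) * 4 does not.  */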
5919 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5920 return fold_build2 (tcode, ctype,
5921 fold_build2 (code, ctype,
5922 fold_convert (ctype, op0),
5923 fold_convert (ctype, c)),
5924 op1);
5926 break;
5928 case MULT_EXPR:
5929 /* We have a special case here if we are doing something like
5930 (C * 8) % 4 since we know that's zero. */
5931 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 /* If the multiplication can overflow, we cannot optimize this. */
5934 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5935 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5936 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5938 *strict_overflow_p = true;
5939 return omit_one_operand (type, integer_zero_node, op0);
5942 /* ... fall through ... */
5944 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5945 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5946 /* If we can extract our operation from the LHS, do so and return a
5947 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5948 do something only if the second operand is a constant. */
5949 if (same_p
5950 && (t1 = extract_muldiv (op0, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5953 fold_convert (ctype, op1));
5954 else if (tcode == MULT_EXPR && code == MULT_EXPR
5955 && (t1 = extract_muldiv (op1, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5958 fold_convert (ctype, t1));
5959 else if (TREE_CODE (op1) != INTEGER_CST)
5960 return 0;
5962 /* If these are the same operation types, we can associate them
5963 assuming no overflow. */
5964 if (tcode == code)
5966 double_int mul;
5967 bool overflow_p;
5968 unsigned prec = TYPE_PRECISION (ctype);
5969 bool uns = TYPE_UNSIGNED (ctype);
5970 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5971 double_int dic = tree_to_double_int (c).ext (prec, uns);
5972 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5973 overflow_p = ((!uns && overflow_p)
5974 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5975 if (!double_int_fits_to_tree_p (ctype, mul)
5976 && ((uns && tcode != MULT_EXPR) || !uns))
5977 overflow_p = 1;
5978 if (!overflow_p)
5979 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5980 double_int_to_tree (ctype, mul));
5983 /* If these operations "cancel" each other, we have the main
5984 optimizations of this pass, which occur when either constant is a
5985 multiple of the other, in which case we replace this with either an
5986 operation of CODE or TCODE.
5988 If we have an unsigned type, we cannot do this since it will change
5989 the result if the original computation overflowed. */
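      /* For instance, (X * 6) / 3 cancels to X * 2, and (X * 3) / 6
         cancels to X / 2; both require overflow to be undefined in
         the computation type.  */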
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5991 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5992 || (tcode == MULT_EXPR
5993 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5994 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5995 && code != MULT_EXPR)))
5997 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 op1, c)));
6006 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6008 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6009 *strict_overflow_p = true;
6010 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6011 fold_convert (ctype,
6012 const_binop (TRUNC_DIV_EXPR,
6013 c, op1)));
6016 break;
6018 default:
6019 break;
6022 return 0;
6025 /* Return a node which has the indicated constant VALUE (either 0 or
6026 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6027 and is of the indicated TYPE. */
6029 tree
6030 constant_boolean_node (bool value, tree type)
6032 if (type == integer_type_node)
6033 return value ? integer_one_node : integer_zero_node;
6034 else if (type == boolean_type_node)
6035 return value ? boolean_true_node : boolean_false_node;
6036 else if (TREE_CODE (type) == VECTOR_TYPE)
6037 return build_vector_from_val (type,
6038 build_int_cst (TREE_TYPE (type),
6039 value ? -1 : 0));
6040 else
6041 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6045 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6046 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6047 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6048 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6049 COND is the first argument to CODE; otherwise (as in the example
6050 given here), it is the second argument. TYPE is the type of the
6051 original expression. Return NULL_TREE if no simplification is
6052 possible. */
6054 static tree
6055 fold_binary_op_with_conditional_arg (location_t loc,
6056 enum tree_code code,
6057 tree type, tree op0, tree op1,
6058 tree cond, tree arg, int cond_first_p)
6060 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6061 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6062 tree test, true_value, false_value;
6063 tree lhs = NULL_TREE;
6064 tree rhs = NULL_TREE;
6065 enum tree_code cond_code = COND_EXPR;
6067 if (TREE_CODE (cond) == COND_EXPR
6068 || TREE_CODE (cond) == VEC_COND_EXPR)
6070 test = TREE_OPERAND (cond, 0);
6071 true_value = TREE_OPERAND (cond, 1);
6072 false_value = TREE_OPERAND (cond, 2);
6073 /* If this operand throws an exception, then it does not make
6074 sense to try to perform a logical or arithmetic operation
6075 involving it. */
6076 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6077 lhs = true_value;
6078 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6079 rhs = false_value;
6081 else
6083 tree testtype = TREE_TYPE (cond);
6084 test = cond;
6085 true_value = constant_boolean_node (true, testtype);
6086 false_value = constant_boolean_node (false, testtype);
6089 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6090 cond_code = VEC_COND_EXPR;
6092 /* This transformation is only worthwhile if we don't have to wrap ARG
6093 in a SAVE_EXPR and the operation can be simplified without recursing
6094 on at least one of the branches once it is pushed inside the COND_EXPR. */
6095 if (!TREE_CONSTANT (arg)
6096 && (TREE_SIDE_EFFECTS (arg)
6097 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6098 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6099 return NULL_TREE;
6101 arg = fold_convert_loc (loc, arg_type, arg);
6102 if (lhs == 0)
6104 true_value = fold_convert_loc (loc, cond_type, true_value);
6105 if (cond_first_p)
6106 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6107 else
6108 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6110 if (rhs == 0)
6112 false_value = fold_convert_loc (loc, cond_type, false_value);
6113 if (cond_first_p)
6114 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6115 else
6116 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6119 /* Check that we have simplified at least one of the branches. */
6120 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6121 return NULL_TREE;
6123 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6127 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6129 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6130 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6131 ADDEND is the same as X.
6133 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6134 and finite. The problematic cases are when X is zero, and its mode
6135 has signed zeros. In the case of rounding towards -infinity,
6136 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6137 modes, X + 0 is not the same as X because -0 + 0 is 0. */
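/* For example, when X is -0.0 and rounding is to-nearest, X + 0.0
   evaluates to +0.0 rather than X, so the fold X + 0.0 -> X must be
   refused whenever signed zeros are honored.  */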
6139 bool
6140 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6142 if (!real_zerop (addend))
6143 return false;
6145 /* Don't allow the fold with -fsignaling-nans. */
6146 if (HONOR_SNANS (TYPE_MODE (type)))
6147 return false;
6149 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6150 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6151 return true;
6153 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6154 if (TREE_CODE (addend) == REAL_CST
6155 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6156 negate = !negate;
6158 /* The mode has signed zeros, and we have to honor their sign.
6159 In this situation, there is only one case we can return true for.
6160 X - 0 is the same as X unless rounding towards -infinity must
6161 be honored. */
6162 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6165 /* Subroutine of fold() that checks comparisons of built-in math
6166 functions against real constants.
6168 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6169 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6170 is the type of the result and ARG0 and ARG1 are the operands of the
6171 comparison. ARG1 must be a TREE_REAL_CST.
6173 The function returns the constant folded tree if a simplification
6174 can be made, and NULL_TREE otherwise. */
6176 static tree
6177 fold_mathfn_compare (location_t loc,
6178 enum built_in_function fcode, enum tree_code code,
6179 tree type, tree arg0, tree arg1)
6181 REAL_VALUE_TYPE c;
6183 if (BUILTIN_SQRT_P (fcode))
6185 tree arg = CALL_EXPR_ARG (arg0, 0);
6186 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6188 c = TREE_REAL_CST (arg1);
6189 if (REAL_VALUE_NEGATIVE (c))
6191 /* sqrt(x) < y is always false, if y is negative. */
6192 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6193 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6195 /* sqrt(x) > y is always true, if y is negative and we
6196 don't care about NaNs, i.e. negative values of x. */
6197 if (code == NE_EXPR || !HONOR_NANS (mode))
6198 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6200 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6201 return fold_build2_loc (loc, GE_EXPR, type, arg,
6202 build_real (TREE_TYPE (arg), dconst0));
6204 else if (code == GT_EXPR || code == GE_EXPR)
6206 REAL_VALUE_TYPE c2;
6208 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6209 real_convert (&c2, mode, &c2);
6211 if (REAL_VALUE_ISINF (c2))
6213 /* sqrt(x) > y is x == +Inf, when y is very large. */
6214 if (HONOR_INFINITIES (mode))
6215 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6216 build_real (TREE_TYPE (arg), c2));
6218 /* sqrt(x) > y is always false, when y is very large
6219 and we don't care about infinities. */
6220 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6223 /* sqrt(x) > c is the same as x > c*c. */
6224 return fold_build2_loc (loc, code, type, arg,
6225 build_real (TREE_TYPE (arg), c2));
6227 else if (code == LT_EXPR || code == LE_EXPR)
6229 REAL_VALUE_TYPE c2;
6231 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6232 real_convert (&c2, mode, &c2);
6234 if (REAL_VALUE_ISINF (c2))
6236 /* sqrt(x) < y is always true, when y is a very large
6237 value and we don't care about NaNs or Infinities. */
6238 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6239 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6241 /* sqrt(x) < y is x != +Inf when y is very large and we
6242 don't care about NaNs. */
6243 if (! HONOR_NANS (mode))
6244 return fold_build2_loc (loc, NE_EXPR, type, arg,
6245 build_real (TREE_TYPE (arg), c2));
6247 /* sqrt(x) < y is x >= 0 when y is very large and we
6248 don't care about Infinities. */
6249 if (! HONOR_INFINITIES (mode))
6250 return fold_build2_loc (loc, GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg), dconst0));
6253 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6254 arg = save_expr (arg);
6255 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6256 fold_build2_loc (loc, GE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg),
6258 dconst0)),
6259 fold_build2_loc (loc, NE_EXPR, type, arg,
6260 build_real (TREE_TYPE (arg),
6261 c2)));
6264 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6265 if (! HONOR_NANS (mode))
6266 return fold_build2_loc (loc, code, type, arg,
6267 build_real (TREE_TYPE (arg), c2));
6269 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6270 arg = save_expr (arg);
6271 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6272 fold_build2_loc (loc, GE_EXPR, type, arg,
6273 build_real (TREE_TYPE (arg),
6274 dconst0)),
6275 fold_build2_loc (loc, code, type, arg,
6276 build_real (TREE_TYPE (arg),
6277 c2)));
6281 return NULL_TREE;
6284 /* Subroutine of fold() that optimizes comparisons against Infinities,
6285 either +Inf or -Inf.
6287 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6288 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6289 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6291 The function returns the constant folded tree if a simplification
6292 can be made, and NULL_TREE otherwise. */
6294 static tree
6295 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6296 tree arg0, tree arg1)
6298 enum machine_mode mode;
6299 REAL_VALUE_TYPE max;
6300 tree temp;
6301 bool neg;
6303 mode = TYPE_MODE (TREE_TYPE (arg0));
6305 /* For negative infinity swap the sense of the comparison. */
6306 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6307 if (neg)
6308 code = swap_tree_comparison (code);
6310 switch (code)
6312 case GT_EXPR:
6313 /* x > +Inf is always false, if we ignore sNaNs. */
6314 if (HONOR_SNANS (mode))
6315 return NULL_TREE;
6316 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6318 case LE_EXPR:
6319 /* x <= +Inf is always true, if we don't care about NaNs. */
6320 if (! HONOR_NANS (mode))
6321 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6323 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6324 arg0 = save_expr (arg0);
6325 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6327 case EQ_EXPR:
6328 case GE_EXPR:
6329 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6334 case LT_EXPR:
6335 /* x < +Inf is always equal to x <= DBL_MAX. */
6336 real_maxval (&max, neg, mode);
6337 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6340 case NE_EXPR:
6341 /* x != +Inf is always equal to !(x > DBL_MAX). */
6342 real_maxval (&max, neg, mode);
6343 if (! HONOR_NANS (mode))
6344 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6345 arg0, build_real (TREE_TYPE (arg0), max));
6347 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6349 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6351 default:
6352 break;
6355 return NULL_TREE;
6358 /* Subroutine of fold() that optimizes comparisons of a division by
6359 a nonzero integer constant against an integer constant, i.e.
6360 X/C1 op C2.
6362 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6363 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6364 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6366 The function returns the constant folded tree if a simplification
6367 can be made, and NULL_TREE otherwise. */
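/* For example, with signed truncating division, X / 4 == 5 holds
   exactly for X in [20, 23], so the comparison is rewritten as the
   range check 20 <= X && X <= 23.  */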
6369 static tree
6370 fold_div_compare (location_t loc,
6371 enum tree_code code, tree type, tree arg0, tree arg1)
6373 tree prod, tmp, hi, lo;
6374 tree arg00 = TREE_OPERAND (arg0, 0);
6375 tree arg01 = TREE_OPERAND (arg0, 1);
6376 double_int val;
6377 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6378 bool neg_overflow;
6379 bool overflow;
6381 /* We have to do this the hard way to detect unsigned overflow.
6382 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6383 val = TREE_INT_CST (arg01)
6384 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6385 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6386 neg_overflow = false;
6388 if (unsigned_p)
6390 tmp = int_const_binop (MINUS_EXPR, arg01,
6391 build_int_cst (TREE_TYPE (arg01), 1));
6392 lo = prod;
6394 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6395 val = TREE_INT_CST (prod)
6396 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6397 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6398 -1, overflow | TREE_OVERFLOW (prod));
6400 else if (tree_int_cst_sgn (arg01) >= 0)
6402 tmp = int_const_binop (MINUS_EXPR, arg01,
6403 build_int_cst (TREE_TYPE (arg01), 1));
6404 switch (tree_int_cst_sgn (arg1))
6406 case -1:
6407 neg_overflow = true;
6408 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6409 hi = prod;
6410 break;
6412 case 0:
6413 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 hi = tmp;
6415 break;
6417 case 1:
6418 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6419 lo = prod;
6420 break;
6422 default:
6423 gcc_unreachable ();
6426 else
6428 /* A negative divisor reverses the relational operators. */
6429 code = swap_tree_comparison (code);
6431 tmp = int_const_binop (PLUS_EXPR, arg01,
6432 build_int_cst (TREE_TYPE (arg01), 1));
6433 switch (tree_int_cst_sgn (arg1))
6435 case -1:
6436 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6437 lo = prod;
6438 break;
6440 case 0:
6441 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 lo = tmp;
6443 break;
6445 case 1:
6446 neg_overflow = true;
6447 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6448 hi = prod;
6449 break;
6451 default:
6452 gcc_unreachable ();
6456 switch (code)
6458 case EQ_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 1, lo, hi);
6467 case NE_EXPR:
6468 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6470 if (TREE_OVERFLOW (hi))
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (lo))
6473 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6474 return build_range_check (loc, type, arg00, 0, lo, hi);
6476 case LT_EXPR:
6477 if (TREE_OVERFLOW (lo))
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand_loc (loc, type, tmp, arg00);
6482 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6484 case LE_EXPR:
6485 if (TREE_OVERFLOW (hi))
6487 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 return omit_one_operand_loc (loc, type, tmp, arg00);
6490 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6492 case GT_EXPR:
6493 if (TREE_OVERFLOW (hi))
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand_loc (loc, type, tmp, arg00);
6498 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6500 case GE_EXPR:
6501 if (TREE_OVERFLOW (lo))
6503 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 return omit_one_operand_loc (loc, type, tmp, arg00);
6506 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6508 default:
6509 break;
6512 return NULL_TREE;
6516 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6517 equality/inequality test, then return a simplified form of the test
6518 using a sign test. Otherwise return NULL. TYPE is the desired
6519 result type. */
6521 static tree
6522 fold_single_bit_test_into_sign_test (location_t loc,
6523 enum tree_code code, tree arg0, tree arg1,
6524 tree result_type)
6526 /* If this is testing a single bit, we can optimize the test. */
6527 if ((code == NE_EXPR || code == EQ_EXPR)
6528 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6529 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6531 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6532 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6533 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6535 if (arg00 != NULL_TREE
6536 /* This is only a win if casting to a signed type is cheap,
6537 i.e. when arg00's type is not a partial mode. */
6538 && TYPE_PRECISION (TREE_TYPE (arg00))
6539 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6541 tree stype = signed_type_for (TREE_TYPE (arg00));
6542 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 result_type,
6544 fold_convert_loc (loc, stype, arg00),
6545 build_int_cst (stype, 0));
6549 return NULL_TREE;
6552 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6553 equality/inequality test, then return a simplified form of
6554 the test using shifts and logical operations. Otherwise return
6555 NULL. TYPE is the desired result type. */
6557 tree
6558 fold_single_bit_test (location_t loc, enum tree_code code,
6559 tree arg0, tree arg1, tree result_type)
6561 /* If this is testing a single bit, we can optimize the test. */
6562 if ((code == NE_EXPR || code == EQ_EXPR)
6563 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6564 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6566 tree inner = TREE_OPERAND (arg0, 0);
6567 tree type = TREE_TYPE (arg0);
6568 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6569 enum machine_mode operand_mode = TYPE_MODE (type);
6570 int ops_unsigned;
6571 tree signed_type, unsigned_type, intermediate_type;
6572 tree tem, one;
6574 /* First, see if we can fold the single bit test into a sign-bit
6575 test. */
6576 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6577 result_type);
6578 if (tem)
6579 return tem;
6581 /* Otherwise we have (A & C) != 0 where C is a single bit,
6582 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6583 Similarly for (A & C) == 0. */
6585 /* If INNER is a right shift of a constant and it plus BITNUM does
6586 not overflow, adjust BITNUM and INNER. */
6587 if (TREE_CODE (inner) == RSHIFT_EXPR
6588 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6589 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6590 && bitnum < TYPE_PRECISION (type)
6591 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6592 bitnum - TYPE_PRECISION (type)))
6594 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6595 inner = TREE_OPERAND (inner, 0);
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 && !flag_syntax_only) ? 0 : 1;
6604 #else
6605 ops_unsigned = 1;
6606 #endif
6608 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611 inner = fold_convert_loc (loc, intermediate_type, inner);
6613 if (bitnum != 0)
6614 inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 inner, size_int (bitnum));
6617 one = build_int_cst (intermediate_type, 1);
6619 if (code == EQ_EXPR)
6620 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6622 /* Put the AND last so it can combine with more things. */
6623 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6625 /* Make sure to return the proper type. */
6626 inner = fold_convert_loc (loc, result_type, inner);
6628 return inner;
6630 return NULL_TREE;
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6639 if (! flag_evaluation_order)
6640 return true;
6641 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return true;
6643 return ! TREE_SIDE_EFFECTS (arg0)
6644 && ! TREE_SIDE_EFFECTS (arg1);
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
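/* For example, given 3 + X the constant is recommended to go last,
   yielding the canonical form X + 3.  */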
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6655 STRIP_SIGN_NOPS (arg0);
6656 STRIP_SIGN_NOPS (arg1);
6658 if (TREE_CODE (arg1) == INTEGER_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == INTEGER_CST)
6661 return 1;
6663 if (TREE_CODE (arg1) == REAL_CST)
6664 return 0;
6665 if (TREE_CODE (arg0) == REAL_CST)
6666 return 1;
6668 if (TREE_CODE (arg1) == FIXED_CST)
6669 return 0;
6670 if (TREE_CODE (arg0) == FIXED_CST)
6671 return 1;
6673 if (TREE_CODE (arg1) == COMPLEX_CST)
6674 return 0;
6675 if (TREE_CODE (arg0) == COMPLEX_CST)
6676 return 1;
6678 if (TREE_CONSTANT (arg1))
6679 return 0;
6680 if (TREE_CONSTANT (arg0))
6681 return 1;
6683 if (optimize_function_for_size_p (cfun))
6684 return 0;
6686 if (reorder && flag_evaluation_order
6687 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6688 return 0;
6690 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6691 for commutative and comparison operators. Ensuring a canonical
6692 form allows the optimizers to find additional redundancies without
6693 having to explicitly check for both orderings. */
6694 if (TREE_CODE (arg0) == SSA_NAME
6695 && TREE_CODE (arg1) == SSA_NAME
6696 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6697 return 1;
6699 /* Put SSA_NAMEs last. */
6700 if (TREE_CODE (arg1) == SSA_NAME)
6701 return 0;
6702 if (TREE_CODE (arg0) == SSA_NAME)
6703 return 1;
6705 /* Put variables last. */
6706 if (DECL_P (arg1))
6707 return 0;
6708 if (DECL_P (arg0))
6709 return 1;
6711 return 0;
6714 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6715 ARG0 is extended to a wider type. */
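/* For example, if US is a 16-bit unsigned short widened to unsigned
   int, (unsigned) US < 300 can be done as US < 300 in the narrower
   type because 300 fits in unsigned short, while (unsigned) US < 70000
   is always true since no unsigned short value reaches 70000.  */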
6717 static tree
6718 fold_widened_comparison (location_t loc, enum tree_code code,
6719 tree type, tree arg0, tree arg1)
6721 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6722 tree arg1_unw;
6723 tree shorter_type, outer_type;
6724 tree min, max;
6725 bool above, below;
6727 if (arg0_unw == arg0)
6728 return NULL_TREE;
6729 shorter_type = TREE_TYPE (arg0_unw);
6731 #ifdef HAVE_canonicalize_funcptr_for_compare
6732 /* Disable this optimization if we're casting a function pointer
6733 type on targets that require function pointer canonicalization. */
6734 if (HAVE_canonicalize_funcptr_for_compare
6735 && TREE_CODE (shorter_type) == POINTER_TYPE
6736 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6737 return NULL_TREE;
6738 #endif
6740 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6741 return NULL_TREE;
6743 arg1_unw = get_unwidened (arg1, NULL_TREE);
6745 /* If possible, express the comparison in the shorter mode. */
6746 if ((code == EQ_EXPR || code == NE_EXPR
6747 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6748 && (TREE_TYPE (arg1_unw) == shorter_type
6749 || ((TYPE_PRECISION (shorter_type)
6750 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6751 && (TYPE_UNSIGNED (shorter_type)
6752 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6753 || (TREE_CODE (arg1_unw) == INTEGER_CST
6754 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6755 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6756 && int_fits_type_p (arg1_unw, shorter_type))))
6757 return fold_build2_loc (loc, code, type, arg0_unw,
6758 fold_convert_loc (loc, shorter_type, arg1_unw));
6760 if (TREE_CODE (arg1_unw) != INTEGER_CST
6761 || TREE_CODE (shorter_type) != INTEGER_TYPE
6762 || !int_fits_type_p (arg1_unw, shorter_type))
6763 return NULL_TREE;
6765 /* If we are comparing with an integer that does not fit into the range
6766 of the shorter type, the result is known. */
6767 outer_type = TREE_TYPE (arg1_unw);
6768 min = lower_bound_in_type (outer_type, shorter_type);
6769 max = upper_bound_in_type (outer_type, shorter_type);
6771 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6772 max, arg1_unw));
6773 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6774 arg1_unw, min));
6776 switch (code)
6778 case EQ_EXPR:
6779 if (above || below)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 break;
6783 case NE_EXPR:
6784 if (above || below)
6785 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6786 break;
6788 case LT_EXPR:
6789 case LE_EXPR:
6790 if (above)
6791 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6792 else if (below)
6793 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6795 case GT_EXPR:
6796 case GE_EXPR:
6797 if (above)
6798 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6799 else if (below)
6800 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6802 default:
6803 break;
6806 return NULL_TREE;
6809 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6810 ARG0 just the signedness is changed. */
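/* For example, if U is unsigned int, (int) U == 5 is rewritten as
   U == 5U: the conversion changes only the signedness, and equality
   is unaffected by that reinterpretation of the bits.  */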
6812 static tree
6813 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6814 tree arg0, tree arg1)
6816 tree arg0_inner;
6817 tree inner_type, outer_type;
6819 if (!CONVERT_EXPR_P (arg0))
6820 return NULL_TREE;
6822 outer_type = TREE_TYPE (arg0);
6823 arg0_inner = TREE_OPERAND (arg0, 0);
6824 inner_type = TREE_TYPE (arg0_inner);
6826 #ifdef HAVE_canonicalize_funcptr_for_compare
6827 /* Disable this optimization if we're casting a function pointer
6828 type on targets that require function pointer canonicalization. */
6829 if (HAVE_canonicalize_funcptr_for_compare
6830 && TREE_CODE (inner_type) == POINTER_TYPE
6831 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6832 return NULL_TREE;
6833 #endif
6835 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6836 return NULL_TREE;
6838 if (TREE_CODE (arg1) != INTEGER_CST
6839 && !(CONVERT_EXPR_P (arg1)
6840 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6841 return NULL_TREE;
6843 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6844 && code != NE_EXPR
6845 && code != EQ_EXPR)
6846 return NULL_TREE;
6848 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6849 return NULL_TREE;
6851 if (TREE_CODE (arg1) == INTEGER_CST)
6852 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6853 0, TREE_OVERFLOW (arg1));
6854 else
6855 arg1 = fold_convert_loc (loc, inner_type, arg1);
6857 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6860 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6861 the step of the array. Reconstructs s and delta in the case of s *
6862 delta being an integer constant (and thus already folded). ADDR is
6863 the address. OP1 is the multiplicative expression. If the
6864 function succeeds, the new address expression is returned.
6865 Otherwise NULL_TREE is returned. LOC is the location of the
6866 resulting expression. */
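/* For example, given int A[10] on a target where sizeof (int) == 4,
   the address &A[I] p+ D * 4 becomes &A[I + D], folding the scaled
   offset back into the array index.  */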
6868 static tree
6869 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6871 tree s, delta, step;
6872 tree ref = TREE_OPERAND (addr, 0), pref;
6873 tree ret, pos;
6874 tree itype;
6875 bool mdim = false;
6877 /* Strip the nops that might be added when converting op1 to sizetype. */
6878 STRIP_NOPS (op1);
6880 /* Canonicalize op1 into a possibly non-constant delta
6881 and an INTEGER_CST s. */
6882 if (TREE_CODE (op1) == MULT_EXPR)
6884 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6886 STRIP_NOPS (arg0);
6887 STRIP_NOPS (arg1);
6889 if (TREE_CODE (arg0) == INTEGER_CST)
6891 s = arg0;
6892 delta = arg1;
6894 else if (TREE_CODE (arg1) == INTEGER_CST)
6896 s = arg1;
6897 delta = arg0;
6899 else
6900 return NULL_TREE;
6902 else if (TREE_CODE (op1) == INTEGER_CST)
6904 delta = op1;
6905 s = NULL_TREE;
6907 else
6909 /* Treat op1 as delta * 1. */
6910 delta = op1;
6911 s = integer_one_node;
6914 /* Handle &x.array the same as we would handle &x.array[0]. */
6915 if (TREE_CODE (ref) == COMPONENT_REF
6916 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6918 tree domain;
6920 /* Remember if this was a multi-dimensional array. */
6921 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6922 mdim = true;
6924 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6925 if (! domain)
6926 goto cont;
6927 itype = TREE_TYPE (domain);
6929 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6930 if (TREE_CODE (step) != INTEGER_CST)
6931 goto cont;
6933 if (s)
6935 if (! tree_int_cst_equal (step, s))
6936 goto cont;
6938 else
6940 /* Check whether delta is a multiple of step. */
6941 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6942 if (! tmp)
6943 goto cont;
6944 delta = tmp;
6947 /* Only fold here if we can verify we do not overflow one
6948 dimension of a multi-dimensional array. */
6949 if (mdim)
6951 tree tmp;
6953 if (!TYPE_MIN_VALUE (domain)
6954 || !TYPE_MAX_VALUE (domain)
6955 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6956 goto cont;
6958 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6959 fold_convert_loc (loc, itype,
6960 TYPE_MIN_VALUE (domain)),
6961 fold_convert_loc (loc, itype, delta));
6962 if (TREE_CODE (tmp) != INTEGER_CST
6963 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6964 goto cont;
6967 /* We found a suitable component reference. */
6969 pref = TREE_OPERAND (addr, 0);
6970 ret = copy_node (pref);
6971 SET_EXPR_LOCATION (ret, loc);
6973 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6974 fold_build2_loc
6975 (loc, PLUS_EXPR, itype,
6976 fold_convert_loc (loc, itype,
6977 TYPE_MIN_VALUE
6978 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6979 fold_convert_loc (loc, itype, delta)),
6980 NULL_TREE, NULL_TREE);
6981 return build_fold_addr_expr_loc (loc, ret);
6984 cont:
6986 for (;; ref = TREE_OPERAND (ref, 0))
6988 if (TREE_CODE (ref) == ARRAY_REF)
6990 tree domain;
6992 /* Remember if this was a multi-dimensional array. */
6993 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6994 mdim = true;
6996 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6997 if (! domain)
6998 continue;
6999 itype = TREE_TYPE (domain);
7001 step = array_ref_element_size (ref);
7002 if (TREE_CODE (step) != INTEGER_CST)
7003 continue;
7005 if (s)
7007 if (! tree_int_cst_equal (step, s))
7008 continue;
7010 else
7012 /* Check whether delta is a multiple of step. */
7013 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7014 if (! tmp)
7015 continue;
7016 delta = tmp;
7019 /* Only fold here if we can verify we do not overflow one
7020 dimension of a multi-dimensional array. */
7021 if (mdim)
7023 tree tmp;
7025 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7026 || !TYPE_MAX_VALUE (domain)
7027 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7028 continue;
7030 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7031 fold_convert_loc (loc, itype,
7032 TREE_OPERAND (ref, 1)),
7033 fold_convert_loc (loc, itype, delta));
7034 if (!tmp
7035 || TREE_CODE (tmp) != INTEGER_CST
7036 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7037 continue;
7040 break;
7042 else
7043 mdim = false;
7045 if (!handled_component_p (ref))
7046 return NULL_TREE;
7049 /* We found a suitable array reference. So copy everything up to it,
7050 and replace the index. */
7052 pref = TREE_OPERAND (addr, 0);
7053 ret = copy_node (pref);
7054 SET_EXPR_LOCATION (ret, loc);
7055 pos = ret;
7057 while (pref != ref)
7059 pref = TREE_OPERAND (pref, 0);
7060 TREE_OPERAND (pos, 0) = copy_node (pref);
7061 pos = TREE_OPERAND (pos, 0);
7064 TREE_OPERAND (pos, 1)
7065 = fold_build2_loc (loc, PLUS_EXPR, itype,
7066 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7067 fold_convert_loc (loc, itype, delta));
7068 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7072 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7073 means A >= Y && A != MAX, but in this case we know that
7074 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7076 static tree
7077 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7079 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7081 if (TREE_CODE (bound) == LT_EXPR)
7082 a = TREE_OPERAND (bound, 0);
7083 else if (TREE_CODE (bound) == GT_EXPR)
7084 a = TREE_OPERAND (bound, 1);
7085 else
7086 return NULL_TREE;
7088 typea = TREE_TYPE (a);
7089 if (!INTEGRAL_TYPE_P (typea)
7090 && !POINTER_TYPE_P (typea))
7091 return NULL_TREE;
7093 if (TREE_CODE (ineq) == LT_EXPR)
7095 a1 = TREE_OPERAND (ineq, 1);
7096 y = TREE_OPERAND (ineq, 0);
7098 else if (TREE_CODE (ineq) == GT_EXPR)
7100 a1 = TREE_OPERAND (ineq, 0);
7101 y = TREE_OPERAND (ineq, 1);
7103 else
7104 return NULL_TREE;
7106 if (TREE_TYPE (a1) != typea)
7107 return NULL_TREE;
7109 if (POINTER_TYPE_P (typea))
7111 /* Convert the pointers to integers before taking the difference. */
7112 tree ta = fold_convert_loc (loc, ssizetype, a);
7113 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7114 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7116 else
7117 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7119 if (!diff || !integer_onep (diff))
7120 return NULL_TREE;
7122 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7125 /* Fold a sum or difference of at least one multiplication.
7126 Returns the folded tree or NULL if no simplification could be made. */
7128 static tree
7129 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7130 tree arg0, tree arg1)
7132 tree arg00, arg01, arg10, arg11;
7133 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7135 /* (A * C) +- (B * C) -> (A+-B) * C.
7136 (A * C) +- A -> A * (C+-1).
7137 We are most concerned about the case where C is a constant,
7138 but other combinations show up during loop reduction. Since
7139 it is not difficult, try all four possibilities. */
7141 if (TREE_CODE (arg0) == MULT_EXPR)
7143 arg00 = TREE_OPERAND (arg0, 0);
7144 arg01 = TREE_OPERAND (arg0, 1);
7146 else if (TREE_CODE (arg0) == INTEGER_CST)
7148 arg00 = build_one_cst (type);
7149 arg01 = arg0;
7151 else
7153 /* We cannot generate constant 1 for fract. */
7154 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7155 return NULL_TREE;
7156 arg00 = arg0;
7157 arg01 = build_one_cst (type);
7159 if (TREE_CODE (arg1) == MULT_EXPR)
7161 arg10 = TREE_OPERAND (arg1, 0);
7162 arg11 = TREE_OPERAND (arg1, 1);
7164 else if (TREE_CODE (arg1) == INTEGER_CST)
7166 arg10 = build_one_cst (type);
7167 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7168 the purpose of this canonicalization. */
7169 if (TREE_INT_CST_HIGH (arg1) == -1
7170 && negate_expr_p (arg1)
7171 && code == PLUS_EXPR)
7173 arg11 = negate_expr (arg1);
7174 code = MINUS_EXPR;
7176 else
7177 arg11 = arg1;
7179 else
7181 /* We cannot generate constant 1 for fract. */
7182 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7183 return NULL_TREE;
7184 arg10 = arg1;
7185 arg11 = build_one_cst (type);
7187 same = NULL_TREE;
7189 if (operand_equal_p (arg01, arg11, 0))
7190 same = arg01, alt0 = arg00, alt1 = arg10;
7191 else if (operand_equal_p (arg00, arg10, 0))
7192 same = arg00, alt0 = arg01, alt1 = arg11;
7193 else if (operand_equal_p (arg00, arg11, 0))
7194 same = arg00, alt0 = arg01, alt1 = arg10;
7195 else if (operand_equal_p (arg01, arg10, 0))
7196 same = arg01, alt0 = arg00, alt1 = arg11;
7198 /* No identical multiplicands; see if we can find a common
7199 power-of-two factor in non-power-of-two multiplies. This
7200 can help in multi-dimensional array access. */
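      /* E.g. I * 12 + J * 4 becomes (I * 3 + J) * 4, exposing the
         common factor of 4 even though 12 is not a power of two.  */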
7201 else if (host_integerp (arg01, 0)
7202 && host_integerp (arg11, 0))
7204 HOST_WIDE_INT int01, int11, tmp;
7205 bool swap = false;
7206 tree maybe_same;
7207 int01 = TREE_INT_CST_LOW (arg01);
7208 int11 = TREE_INT_CST_LOW (arg11);
7210 /* Move min of absolute values to int11. */
7211 if (absu_hwi (int01) < absu_hwi (int11))
7213 tmp = int01, int01 = int11, int11 = tmp;
7214 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7215 maybe_same = arg01;
7216 swap = true;
7218 else
7219 maybe_same = arg11;
7221 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7222 /* The remainder should not be a constant, otherwise we
7223 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7224 increase the number of multiplications needed. */
7225 && TREE_CODE (arg10) != INTEGER_CST)
7227 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7228 build_int_cst (TREE_TYPE (arg00),
7229 int01 / int11));
7230 alt1 = arg10;
7231 same = maybe_same;
7232 if (swap)
7233 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7237 if (same)
7238 return fold_build2_loc (loc, MULT_EXPR, type,
7239 fold_build2_loc (loc, code, type,
7240 fold_convert_loc (loc, type, alt0),
7241 fold_convert_loc (loc, type, alt1)),
7242 fold_convert_loc (loc, type, same));
7244 return NULL_TREE;
7247 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7248 specified by EXPR into the buffer PTR of length LEN bytes.
7249 Return the number of bytes placed in the buffer, or zero
7250 upon failure. */
7252 static int
7253 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7255 tree type = TREE_TYPE (expr);
7256 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7257 int byte, offset, word, words;
7258 unsigned char value;
7260 if (total_bytes > len)
7261 return 0;
7262 words = total_bytes / UNITS_PER_WORD;
7264 for (byte = 0; byte < total_bytes; byte++)
7266 int bitpos = byte * BITS_PER_UNIT;
7267 if (bitpos < HOST_BITS_PER_WIDE_INT)
7268 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7269 else
7270 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7271 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7273 if (total_bytes > UNITS_PER_WORD)
7275 word = byte / UNITS_PER_WORD;
7276 if (WORDS_BIG_ENDIAN)
7277 word = (words - 1) - word;
7278 offset = word * UNITS_PER_WORD;
7279 if (BYTES_BIG_ENDIAN)
7280 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7281 else
7282 offset += byte % UNITS_PER_WORD;
7284 else
7285 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7286 ptr[offset] = value;
7288 return total_bytes;
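/* A minimal sketch of the resulting layout (assuming a little-endian
   target with 8-bit bytes, illustrative only): encoding the 32-bit
   INTEGER_CST 0x01020304 stores ptr[0..3] = { 0x04, 0x03, 0x02, 0x01 };
   a big-endian target stores the bytes in the reverse order. */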
7292 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7293 specified by EXPR into the buffer PTR of length LEN bytes.
7294 Return the number of bytes placed in the buffer, or zero
7295 upon failure. */
7297 static int
7298 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7300 tree type = TREE_TYPE (expr);
7301 enum machine_mode mode = TYPE_MODE (type);
7302 int total_bytes = GET_MODE_SIZE (mode);
7303 FIXED_VALUE_TYPE value;
7304 tree i_value, i_type;
7306 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7307 return 0;
7309 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7311 if (NULL_TREE == i_type
7312 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7313 return 0;
7315 value = TREE_FIXED_CST (expr);
7316 i_value = double_int_to_tree (i_type, value.data);
7318 return native_encode_int (i_value, ptr, len);
7322 /* Subroutine of native_encode_expr. Encode the REAL_CST
7323 specified by EXPR into the buffer PTR of length LEN bytes.
7324 Return the number of bytes placed in the buffer, or zero
7325 upon failure. */
7327 static int
7328 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7330 tree type = TREE_TYPE (expr);
7331 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7332 int byte, offset, word, words, bitpos;
7333 unsigned char value;
7335 /* There are always 32 bits in each long, no matter the size of
7336 the host's long. We handle floating point representations with
7337 up to 192 bits. */
7338 long tmp[6];
7340 if (total_bytes > len)
7341 return 0;
7342 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7344 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7352 if (UNITS_PER_WORD < 4)
7354 word = byte / UNITS_PER_WORD;
7355 if (WORDS_BIG_ENDIAN)
7356 word = (words - 1) - word;
7357 offset = word * UNITS_PER_WORD;
7358 if (BYTES_BIG_ENDIAN)
7359 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 else
7361 offset += byte % UNITS_PER_WORD;
7363 else
7364 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7365 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7367 return total_bytes;
7370 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7371 specified by EXPR into the buffer PTR of length LEN bytes.
7372 Return the number of bytes placed in the buffer, or zero
7373 upon failure. */
7375 static int
7376 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7378 int rsize, isize;
7379 tree part;
7381 part = TREE_REALPART (expr);
7382 rsize = native_encode_expr (part, ptr, len);
7383 if (rsize == 0)
7384 return 0;
7385 part = TREE_IMAGPART (expr);
7386 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7387 if (isize != rsize)
7388 return 0;
7389 return rsize + isize;
7393 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7394 specified by EXPR into the buffer PTR of length LEN bytes.
7395 Return the number of bytes placed in the buffer, or zero
7396 upon failure. */
7398 static int
7399 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7401 unsigned i, count;
7402 int size, offset;
7403 tree itype, elem;
7405 offset = 0;
7406 count = VECTOR_CST_NELTS (expr);
7407 itype = TREE_TYPE (TREE_TYPE (expr));
7408 size = GET_MODE_SIZE (TYPE_MODE (itype));
7409 for (i = 0; i < count; i++)
7411 elem = VECTOR_CST_ELT (expr, i);
7412 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7413 return 0;
7414 offset += size;
7416 return offset;
7420 /* Subroutine of native_encode_expr. Encode the STRING_CST
7421 specified by EXPR into the buffer PTR of length LEN bytes.
7422 Return the number of bytes placed in the buffer, or zero
7423 upon failure. */
7425 static int
7426 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7428 tree type = TREE_TYPE (expr);
7429 HOST_WIDE_INT total_bytes;
7431 if (TREE_CODE (type) != ARRAY_TYPE
7432 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7433 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7434 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7435 return 0;
7436 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7437 if (total_bytes > len)
7438 return 0;
7439 if (TREE_STRING_LENGTH (expr) < total_bytes)
7441 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7442 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7443 total_bytes - TREE_STRING_LENGTH (expr));
7445 else
7446 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7447 return total_bytes;
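/* E.g. (illustrative): a STRING_CST holding "hi" stored in a char[8]
   array type copies the string bytes and zero-fills the remaining
   tail of the buffer. */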
7451 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7452 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7453 buffer PTR of length LEN bytes. Return the number of bytes
7454 placed in the buffer, or zero upon failure. */
7456 int
7457 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7459 switch (TREE_CODE (expr))
7461 case INTEGER_CST:
7462 return native_encode_int (expr, ptr, len);
7464 case REAL_CST:
7465 return native_encode_real (expr, ptr, len);
7467 case FIXED_CST:
7468 return native_encode_fixed (expr, ptr, len);
7470 case COMPLEX_CST:
7471 return native_encode_complex (expr, ptr, len);
7473 case VECTOR_CST:
7474 return native_encode_vector (expr, ptr, len);
7476 case STRING_CST:
7477 return native_encode_string (expr, ptr, len);
7479 default:
7480 return 0;
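/* A hedged usage sketch (the function is real, the scenario is
   illustrative):

     unsigned char buf[16];
     int n = native_encode_expr (expr, buf, (int) sizeof buf);
     if (n > 0)
       ... buf now holds the target byte image of EXPR ...  */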
7485 /* Subroutine of native_interpret_expr. Interpret the contents of
7486 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7487 If the buffer cannot be interpreted, return NULL_TREE. */
7489 static tree
7490 native_interpret_int (tree type, const unsigned char *ptr, int len)
7492 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7493 double_int result;
7495 if (total_bytes > len
7496 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7497 return NULL_TREE;
7499 result = double_int::from_buffer (ptr, total_bytes);
7501 return double_int_to_tree (type, result);
7505 /* Subroutine of native_interpret_expr. Interpret the contents of
7506 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7507 If the buffer cannot be interpreted, return NULL_TREE. */
7509 static tree
7510 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7512 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7513 double_int result;
7514 FIXED_VALUE_TYPE fixed_value;
7516 if (total_bytes > len
7517 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7518 return NULL_TREE;
7520 result = double_int::from_buffer (ptr, total_bytes);
7521 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7523 return build_fixed (type, fixed_value);
7527 /* Subroutine of native_interpret_expr. Interpret the contents of
7528 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7529 If the buffer cannot be interpreted, return NULL_TREE. */
7531 static tree
7532 native_interpret_real (tree type, const unsigned char *ptr, int len)
7534 enum machine_mode mode = TYPE_MODE (type);
7535 int total_bytes = GET_MODE_SIZE (mode);
7536 int byte, offset, word, words, bitpos;
7537 unsigned char value;
7538 /* There are always 32 bits in each long, no matter the size of
7539 the host's long. We handle floating point representations with
7540 up to 192 bits. */
7541 REAL_VALUE_TYPE r;
7542 long tmp[6];
7545 if (total_bytes > len || total_bytes > 24)
7546 return NULL_TREE;
7547 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7549 memset (tmp, 0, sizeof (tmp));
7550 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7551 bitpos += BITS_PER_UNIT)
7553 byte = (bitpos / BITS_PER_UNIT) & 3;
7554 if (UNITS_PER_WORD < 4)
7556 word = byte / UNITS_PER_WORD;
7557 if (WORDS_BIG_ENDIAN)
7558 word = (words - 1) - word;
7559 offset = word * UNITS_PER_WORD;
7560 if (BYTES_BIG_ENDIAN)
7561 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7562 else
7563 offset += byte % UNITS_PER_WORD;
7565 else
7566 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7567 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7569 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7572 real_from_target (&r, tmp, mode);
7573 return build_real (type, r);
7577 /* Subroutine of native_interpret_expr. Interpret the contents of
7578 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7579 If the buffer cannot be interpreted, return NULL_TREE. */
7581 static tree
7582 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7584 tree etype, rpart, ipart;
7585 int size;
7587 etype = TREE_TYPE (type);
7588 size = GET_MODE_SIZE (TYPE_MODE (etype));
7589 if (size * 2 > len)
7590 return NULL_TREE;
7591 rpart = native_interpret_expr (etype, ptr, size);
7592 if (!rpart)
7593 return NULL_TREE;
7594 ipart = native_interpret_expr (etype, ptr+size, size);
7595 if (!ipart)
7596 return NULL_TREE;
7597 return build_complex (type, rpart, ipart);
7601 /* Subroutine of native_interpret_expr. Interpret the contents of
7602 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7603 If the buffer cannot be interpreted, return NULL_TREE. */
7605 static tree
7606 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7608 tree etype, elem;
7609 int i, size, count;
7610 tree *elements;
7612 etype = TREE_TYPE (type);
7613 size = GET_MODE_SIZE (TYPE_MODE (etype));
7614 count = TYPE_VECTOR_SUBPARTS (type);
7615 if (size * count > len)
7616 return NULL_TREE;
7618 elements = XALLOCAVEC (tree, count);
7619 for (i = count - 1; i >= 0; i--)
7621 elem = native_interpret_expr (etype, ptr+(i*size), size);
7622 if (!elem)
7623 return NULL_TREE;
7624 elements[i] = elem;
7626 return build_vector (type, elements);
7630 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7631 the buffer PTR of length LEN as a constant of type TYPE. For
7632 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7633 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7634 return NULL_TREE. */
7636 tree
7637 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7639 switch (TREE_CODE (type))
7641 case INTEGER_TYPE:
7642 case ENUMERAL_TYPE:
7643 case BOOLEAN_TYPE:
7644 case POINTER_TYPE:
7645 case REFERENCE_TYPE:
7646 return native_interpret_int (type, ptr, len);
7648 case REAL_TYPE:
7649 return native_interpret_real (type, ptr, len);
7651 case FIXED_POINT_TYPE:
7652 return native_interpret_fixed (type, ptr, len);
7654 case COMPLEX_TYPE:
7655 return native_interpret_complex (type, ptr, len);
7657 case VECTOR_TYPE:
7658 return native_interpret_vector (type, ptr, len);
7660 default:
7661 return NULL_TREE;
7665 /* Returns true if we can interpret the contents of a native encoding
7666 as TYPE. */
7668 static bool
7669 can_native_interpret_type_p (tree type)
7671 switch (TREE_CODE (type))
7673 case INTEGER_TYPE:
7674 case ENUMERAL_TYPE:
7675 case BOOLEAN_TYPE:
7676 case POINTER_TYPE:
7677 case REFERENCE_TYPE:
7678 case FIXED_POINT_TYPE:
7679 case REAL_TYPE:
7680 case COMPLEX_TYPE:
7681 case VECTOR_TYPE:
7682 return true;
7683 default:
7684 return false;
7688 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7689 TYPE at compile-time. If we're unable to perform the conversion,
7690 return NULL_TREE. */
7692 static tree
7693 fold_view_convert_expr (tree type, tree expr)
7695 /* We support up to 512-bit values (for V8DFmode). */
7696 unsigned char buffer[64];
7697 int len;
7699 /* Check that the host and target are sane. */
7700 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7701 return NULL_TREE;
7703 len = native_encode_expr (expr, buffer, sizeof (buffer));
7704 if (len == 0)
7705 return NULL_TREE;
7707 return native_interpret_expr (type, buffer, len);
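/* An illustrative fold (assuming 32-bit float and int with 8-bit
   bytes): VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST into its
   four target bytes and reinterprets them, yielding the INTEGER_CST
   0x3f800000 at compile time. */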
7710 /* Build an expression for the address of T. Folds away INDIRECT_REF
7711 to avoid confusing the gimplify process. */
7713 tree
7714 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7716 /* The size of the object is not relevant when talking about its address. */
7717 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7718 t = TREE_OPERAND (t, 0);
7720 if (TREE_CODE (t) == INDIRECT_REF)
7722 t = TREE_OPERAND (t, 0);
7724 if (TREE_TYPE (t) != ptrtype)
7725 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7727 else if (TREE_CODE (t) == MEM_REF
7728 && integer_zerop (TREE_OPERAND (t, 1)))
7729 return TREE_OPERAND (t, 0);
7730 else if (TREE_CODE (t) == MEM_REF
7731 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7732 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7733 TREE_OPERAND (t, 0),
7734 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7735 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7737 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7739 if (TREE_TYPE (t) != ptrtype)
7740 t = fold_convert_loc (loc, ptrtype, t);
7742 else
7743 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7745 return t;
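/* Examples of the folds above (illustrative): &*p becomes p, possibly
   with a cast to PTRTYPE; &MEM[p, 0] becomes p; and
   &VIEW_CONVERT_EXPR<T>(x) becomes &x converted to PTRTYPE. */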
7748 /* Build an expression for the address of T. */
7750 tree
7751 build_fold_addr_expr_loc (location_t loc, tree t)
7753 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7755 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7758 static bool vec_cst_ctor_to_array (tree, tree *);
7760 /* Fold a unary expression of code CODE and type TYPE with operand
7761 OP0. Return the folded expression if folding is successful.
7762 Otherwise, return NULL_TREE. */
7764 tree
7765 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7767 tree tem;
7768 tree arg0;
7769 enum tree_code_class kind = TREE_CODE_CLASS (code);
7771 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7772 && TREE_CODE_LENGTH (code) == 1);
7774 arg0 = op0;
7775 if (arg0)
7777 if (CONVERT_EXPR_CODE_P (code)
7778 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7780 /* Don't use STRIP_NOPS, because signedness of argument type
7781 matters. */
7782 STRIP_SIGN_NOPS (arg0);
7784 else
7786 /* Strip any conversions that don't change the mode. This
7787 is safe for every expression, except for a comparison
7788 expression because its signedness is derived from its
7789 operands.
7791 Note that this is done as an internal manipulation within
7792 the constant folder, in order to find the simplest
7793 representation of the arguments so that their form can be
7794 studied. In any case, the appropriate type conversions
7795 should be put back in the tree that comes out of the
7796 constant folder. */
7797 STRIP_NOPS (arg0);
7801 if (TREE_CODE_CLASS (code) == tcc_unary)
7803 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7804 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7805 fold_build1_loc (loc, code, type,
7806 fold_convert_loc (loc, TREE_TYPE (op0),
7807 TREE_OPERAND (arg0, 1))));
7808 else if (TREE_CODE (arg0) == COND_EXPR)
7810 tree arg01 = TREE_OPERAND (arg0, 1);
7811 tree arg02 = TREE_OPERAND (arg0, 2);
7812 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7813 arg01 = fold_build1_loc (loc, code, type,
7814 fold_convert_loc (loc,
7815 TREE_TYPE (op0), arg01));
7816 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7817 arg02 = fold_build1_loc (loc, code, type,
7818 fold_convert_loc (loc,
7819 TREE_TYPE (op0), arg02));
7820 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7821 arg01, arg02);
7823 /* If this was a conversion, and all we did was to move it
7824 inside the COND_EXPR, bring it back out. But leave it if
7825 it is a conversion from integer to integer and the
7826 result precision is no wider than a word since such a
7827 conversion is cheap and may be optimized away by combine,
7828 while it couldn't if it were outside the COND_EXPR. Then return
7829 so we don't get into infinite recursion taking the
7830 conversion out and then back in. */
7832 if ((CONVERT_EXPR_CODE_P (code)
7833 || code == NON_LVALUE_EXPR)
7834 && TREE_CODE (tem) == COND_EXPR
7835 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7836 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7837 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7838 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7839 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7840 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7841 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7842 && (INTEGRAL_TYPE_P
7843 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7844 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7845 || flag_syntax_only))
7846 tem = build1_loc (loc, code, type,
7847 build3 (COND_EXPR,
7848 TREE_TYPE (TREE_OPERAND
7849 (TREE_OPERAND (tem, 1), 0)),
7850 TREE_OPERAND (tem, 0),
7851 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7852 TREE_OPERAND (TREE_OPERAND (tem, 2),
7853 0)));
7854 return tem;
7858 switch (code)
7860 case PAREN_EXPR:
7861 /* Re-association barriers around constants and other re-association
7862 barriers can be removed. */
7863 if (CONSTANT_CLASS_P (op0)
7864 || TREE_CODE (op0) == PAREN_EXPR)
7865 return fold_convert_loc (loc, type, op0);
7866 return NULL_TREE;
7868 CASE_CONVERT:
7869 case FLOAT_EXPR:
7870 case FIX_TRUNC_EXPR:
7871 if (TREE_TYPE (op0) == type)
7872 return op0;
7874 if (COMPARISON_CLASS_P (op0))
7876 /* If we have (type) (a CMP b) and type is an integral type, return a
7877 new expression involving the new type. Canonicalize
7878 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7879 non-integral type.
7880 Do not fold the result, as that would not simplify it further;
7881 folding again would result in infinite recursion. */
7882 if (TREE_CODE (type) == BOOLEAN_TYPE)
7883 return build2_loc (loc, TREE_CODE (op0), type,
7884 TREE_OPERAND (op0, 0),
7885 TREE_OPERAND (op0, 1));
7886 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7887 && TREE_CODE (type) != VECTOR_TYPE)
7888 return build3_loc (loc, COND_EXPR, type, op0,
7889 constant_boolean_node (true, type),
7890 constant_boolean_node (false, type));
7893 /* Handle cases of two conversions in a row. */
7894 if (CONVERT_EXPR_P (op0))
7896 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7897 tree inter_type = TREE_TYPE (op0);
7898 int inside_int = INTEGRAL_TYPE_P (inside_type);
7899 int inside_ptr = POINTER_TYPE_P (inside_type);
7900 int inside_float = FLOAT_TYPE_P (inside_type);
7901 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7902 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7903 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7904 int inter_int = INTEGRAL_TYPE_P (inter_type);
7905 int inter_ptr = POINTER_TYPE_P (inter_type);
7906 int inter_float = FLOAT_TYPE_P (inter_type);
7907 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7908 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7909 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7910 int final_int = INTEGRAL_TYPE_P (type);
7911 int final_ptr = POINTER_TYPE_P (type);
7912 int final_float = FLOAT_TYPE_P (type);
7913 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7914 unsigned int final_prec = TYPE_PRECISION (type);
7915 int final_unsignedp = TYPE_UNSIGNED (type);
7917 /* In addition to the cases of two conversions in a row
7918 handled below, if we are converting something to its own
7919 type via an object of identical or wider precision, neither
7920 conversion is needed. */
7921 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7922 && (((inter_int || inter_ptr) && final_int)
7923 || (inter_float && final_float))
7924 && inter_prec >= final_prec)
7925 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7927 /* Likewise, if the intermediate and initial types are either both
7928 float or both integer, we don't need the middle conversion if the
7929 former is wider than the latter and doesn't change the signedness
7930 (for integers). Avoid this if the final type is a pointer since
7931 then we sometimes need the middle conversion. Likewise if the
7932 final type has a precision not equal to the size of its mode. */
7933 if (((inter_int && inside_int)
7934 || (inter_float && inside_float)
7935 || (inter_vec && inside_vec))
7936 && inter_prec >= inside_prec
7937 && (inter_float || inter_vec
7938 || inter_unsignedp == inside_unsignedp)
7939 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7940 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7941 && ! final_ptr
7942 && (! final_vec || inter_prec == inside_prec))
7943 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7945 /* If we have a sign-extension of a zero-extended value, we can
7946 replace that by a single zero-extension. Likewise if the
7947 final conversion does not change precision we can drop the
7948 intermediate conversion. */
7949 if (inside_int && inter_int && final_int
7950 && ((inside_prec < inter_prec && inter_prec < final_prec
7951 && inside_unsignedp && !inter_unsignedp)
7952 || final_prec == inter_prec))
7953 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7955 /* Two conversions in a row are not needed unless:
7956 - some conversion is floating-point (overstrict for now), or
7957 - some conversion is a vector (overstrict for now), or
7958 - the intermediate type is narrower than both initial and
7959 final, or
7960 - the intermediate type and innermost type differ in signedness,
7961 and the outermost type is wider than the intermediate, or
7962 - the initial type is a pointer type and the precisions of the
7963 intermediate and final types differ, or
7964 - the final type is a pointer type and the precisions of the
7965 initial and intermediate types differ. */
7966 if (! inside_float && ! inter_float && ! final_float
7967 && ! inside_vec && ! inter_vec && ! final_vec
7968 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7969 && ! (inside_int && inter_int
7970 && inter_unsignedp != inside_unsignedp
7971 && inter_prec < final_prec)
7972 && ((inter_unsignedp && inter_prec > inside_prec)
7973 == (final_unsignedp && final_prec > inter_prec))
7974 && ! (inside_ptr && inter_prec != final_prec)
7975 && ! (final_ptr && inside_prec != inter_prec)
7976 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7977 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7978 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
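/* Hedged examples of these rules (assuming an ILP32 target):
   (int)(long)x with x of type int drops the intermediate conversion,
   since long and int have identical precision; (int)(char)x must keep
   it, because the intermediate type is narrower than both the initial
   and the final type. */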
7981 /* Handle (T *)&A.B.C for A being of type T and B and C
7982 living at offset zero. This occurs frequently in
7983 C++ upcasting and then accessing the base. */
7984 if (TREE_CODE (op0) == ADDR_EXPR
7985 && POINTER_TYPE_P (type)
7986 && handled_component_p (TREE_OPERAND (op0, 0)))
7988 HOST_WIDE_INT bitsize, bitpos;
7989 tree offset;
7990 enum machine_mode mode;
7991 int unsignedp, volatilep;
7992 tree base = TREE_OPERAND (op0, 0);
7993 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7994 &mode, &unsignedp, &volatilep, false);
7995 /* If the reference was to a (constant) zero offset, we can use
7996 the address of the base if it has the same base type
7997 as the result type and the pointer type is unqualified. */
7998 if (! offset && bitpos == 0
7999 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8000 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8001 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8002 return fold_convert_loc (loc, type,
8003 build_fold_addr_expr_loc (loc, base));
8006 if (TREE_CODE (op0) == MODIFY_EXPR
8007 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8008 /* Detect assigning a bitfield. */
8009 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8010 && DECL_BIT_FIELD
8011 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8013 /* Don't leave an assignment inside a conversion
8014 unless assigning a bitfield. */
8015 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8016 /* First do the assignment, then return converted constant. */
8017 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8018 TREE_NO_WARNING (tem) = 1;
8019 TREE_USED (tem) = 1;
8020 return tem;
8023 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8024 constant (if x has signed type, the sign bit cannot be set
8025 in c). This folds extension into the BIT_AND_EXPR.
8026 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8027 very likely don't have maximal range for their precision and this
8028 transformation effectively doesn't preserve non-maximal ranges. */
8029 if (TREE_CODE (type) == INTEGER_TYPE
8030 && TREE_CODE (op0) == BIT_AND_EXPR
8031 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8033 tree and_expr = op0;
8034 tree and0 = TREE_OPERAND (and_expr, 0);
8035 tree and1 = TREE_OPERAND (and_expr, 1);
8036 int change = 0;
8038 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8039 || (TYPE_PRECISION (type)
8040 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8041 change = 1;
8042 else if (TYPE_PRECISION (TREE_TYPE (and1))
8043 <= HOST_BITS_PER_WIDE_INT
8044 && host_integerp (and1, 1))
8046 unsigned HOST_WIDE_INT cst;
8048 cst = tree_low_cst (and1, 1);
8049 cst &= (HOST_WIDE_INT) -1
8050 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8051 change = (cst == 0);
8052 #ifdef LOAD_EXTEND_OP
8053 if (change
8054 && !flag_syntax_only
8055 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8056 == ZERO_EXTEND))
8058 tree uns = unsigned_type_for (TREE_TYPE (and0));
8059 and0 = fold_convert_loc (loc, uns, and0);
8060 and1 = fold_convert_loc (loc, uns, and1);
8062 #endif
8064 if (change)
8066 tem = force_fit_type_double (type, tree_to_double_int (and1),
8067 0, TREE_OVERFLOW (and1));
8068 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8069 fold_convert_loc (loc, type, and0), tem);
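/* An illustrative instance at the tree level (assuming 8-bit unsigned
   char and 32-bit int): (int) (x & 0xf0), with the BIT_AND_EXPR
   performed in unsigned char, becomes (int) x & 0xf0. */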
8073 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8074 when one of the new casts will fold away. Conservatively we assume
8075 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8076 if (POINTER_TYPE_P (type)
8077 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8078 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8079 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8080 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8081 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8083 tree arg00 = TREE_OPERAND (arg0, 0);
8084 tree arg01 = TREE_OPERAND (arg0, 1);
8086 return fold_build_pointer_plus_loc
8087 (loc, fold_convert_loc (loc, type, arg00), arg01);
8090 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8091 of the same precision, and X is an integer type not narrower than
8092 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8093 if (INTEGRAL_TYPE_P (type)
8094 && TREE_CODE (op0) == BIT_NOT_EXPR
8095 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8096 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8097 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8099 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8100 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8101 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8102 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8103 fold_convert_loc (loc, type, tem));
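/* E.g. (illustrative, 32-bit int and unsigned): for unsigned x,
   (unsigned) ~(int) x becomes ~x, since the inner cast is not an
   extension. */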
8106 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8107 type of X and Y (integer types only). */
8108 if (INTEGRAL_TYPE_P (type)
8109 && TREE_CODE (op0) == MULT_EXPR
8110 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8111 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8113 /* Be careful not to introduce new overflows. */
8114 tree mult_type;
8115 if (TYPE_OVERFLOW_WRAPS (type))
8116 mult_type = type;
8117 else
8118 mult_type = unsigned_type_for (type);
8120 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8122 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8123 fold_convert_loc (loc, mult_type,
8124 TREE_OPERAND (op0, 0)),
8125 fold_convert_loc (loc, mult_type,
8126 TREE_OPERAND (op0, 1)));
8127 return fold_convert_loc (loc, type, tem);
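/* A hedged example (64-bit long, 32-bit int, int overflow not known
   to wrap): (int) ((long) a * (long) b) is rewritten to do the
   multiplication in unsigned int, which avoids introducing new
   undefined overflow. */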
8131 tem = fold_convert_const (code, type, op0);
8132 return tem ? tem : NULL_TREE;
8134 case ADDR_SPACE_CONVERT_EXPR:
8135 if (integer_zerop (arg0))
8136 return fold_convert_const (code, type, arg0);
8137 return NULL_TREE;
8139 case FIXED_CONVERT_EXPR:
8140 tem = fold_convert_const (code, type, arg0);
8141 return tem ? tem : NULL_TREE;
8143 case VIEW_CONVERT_EXPR:
8144 if (TREE_TYPE (op0) == type)
8145 return op0;
8146 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8147 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8148 type, TREE_OPERAND (op0, 0));
8149 if (TREE_CODE (op0) == MEM_REF)
8150 return fold_build2_loc (loc, MEM_REF, type,
8151 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8153 /* For integral conversions with the same precision or pointer
8154 conversions use a NOP_EXPR instead. */
8155 if ((INTEGRAL_TYPE_P (type)
8156 || POINTER_TYPE_P (type))
8157 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8158 || POINTER_TYPE_P (TREE_TYPE (op0)))
8159 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8160 return fold_convert_loc (loc, type, op0);
8162 /* Strip inner integral conversions that do not change the precision. */
8163 if (CONVERT_EXPR_P (op0)
8164 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8165 || POINTER_TYPE_P (TREE_TYPE (op0)))
8166 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8167 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8168 && (TYPE_PRECISION (TREE_TYPE (op0))
8169 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8170 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8171 type, TREE_OPERAND (op0, 0));
8173 return fold_view_convert_expr (type, op0);
8175 case NEGATE_EXPR:
8176 tem = fold_negate_expr (loc, arg0);
8177 if (tem)
8178 return fold_convert_loc (loc, type, tem);
8179 return NULL_TREE;
8181 case ABS_EXPR:
8182 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8183 return fold_abs_const (arg0, type);
8184 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8185 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8186 /* Convert fabs((double)float) into (double)fabsf(float). */
8187 else if (TREE_CODE (arg0) == NOP_EXPR
8188 && TREE_CODE (type) == REAL_TYPE)
8190 tree targ0 = strip_float_extensions (arg0);
8191 if (targ0 != arg0)
8192 return fold_convert_loc (loc, type,
8193 fold_build1_loc (loc, ABS_EXPR,
8194 TREE_TYPE (targ0),
8195 targ0));
8197 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8198 else if (TREE_CODE (arg0) == ABS_EXPR)
8199 return arg0;
8200 else if (tree_expr_nonnegative_p (arg0))
8201 return arg0;
8203 /* Strip sign ops from argument. */
8204 if (TREE_CODE (type) == REAL_TYPE)
8206 tem = fold_strip_sign_ops (arg0);
8207 if (tem)
8208 return fold_build1_loc (loc, ABS_EXPR, type,
8209 fold_convert_loc (loc, type, tem));
8211 return NULL_TREE;
8213 case CONJ_EXPR:
8214 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8215 return fold_convert_loc (loc, type, arg0);
8216 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8218 tree itype = TREE_TYPE (type);
8219 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8220 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8221 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8222 negate_expr (ipart));
8224 if (TREE_CODE (arg0) == COMPLEX_CST)
8226 tree itype = TREE_TYPE (type);
8227 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8228 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8229 return build_complex (type, rpart, negate_expr (ipart));
8231 if (TREE_CODE (arg0) == CONJ_EXPR)
8232 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8233 return NULL_TREE;
8235 case BIT_NOT_EXPR:
8236 if (TREE_CODE (arg0) == INTEGER_CST)
8237 return fold_not_const (arg0, type);
8238 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8239 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8240 /* Convert ~ (-A) to A - 1. */
8241 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8242 return fold_build2_loc (loc, MINUS_EXPR, type,
8243 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8244 build_int_cst (type, 1));
8245 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8246 else if (INTEGRAL_TYPE_P (type)
8247 && ((TREE_CODE (arg0) == MINUS_EXPR
8248 && integer_onep (TREE_OPERAND (arg0, 1)))
8249 || (TREE_CODE (arg0) == PLUS_EXPR
8250 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8251 return fold_build1_loc (loc, NEGATE_EXPR, type,
8252 fold_convert_loc (loc, type,
8253 TREE_OPERAND (arg0, 0)));
8254 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8255 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8256 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8257 fold_convert_loc (loc, type,
8258 TREE_OPERAND (arg0, 0)))))
8259 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8260 fold_convert_loc (loc, type,
8261 TREE_OPERAND (arg0, 1)));
8262 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8263 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8264 fold_convert_loc (loc, type,
8265 TREE_OPERAND (arg0, 1)))))
8266 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8267 fold_convert_loc (loc, type,
8268 TREE_OPERAND (arg0, 0)), tem);
8269 /* Perform BIT_NOT_EXPR on each element individually. */
8270 else if (TREE_CODE (arg0) == VECTOR_CST)
8272 tree *elements;
8273 tree elem;
8274 unsigned count = VECTOR_CST_NELTS (arg0), i;
8276 elements = XALLOCAVEC (tree, count);
8277 for (i = 0; i < count; i++)
8279 elem = VECTOR_CST_ELT (arg0, i);
8280 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8281 if (elem == NULL_TREE)
8282 break;
8283 elements[i] = elem;
8285 if (i == count)
8286 return build_vector (type, elements);
8289 return NULL_TREE;
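/* Summary of the BIT_NOT_EXPR folds above (illustrative):
     ~5        ->  -6  (constant folded)
     ~~x       ->  x
     ~(-x)     ->  x - 1
     ~(x - 1)  ->  -x
     ~(y ^ c)  ->  ~y ^ c, when ~y simplifies.  */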
8291 case TRUTH_NOT_EXPR:
8292 /* The argument to invert_truthvalue must have Boolean type. */
8293 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8294 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8296 /* Note that the operand of this must be an int
8297 and its values must be 0 or 1.
8298 ("true" is a fixed value perhaps depending on the language,
8299 but we don't handle values other than 1 correctly yet.) */
8300 tem = fold_truth_not_expr (loc, arg0);
8301 if (!tem)
8302 return NULL_TREE;
8303 return fold_convert_loc (loc, type, tem);
8305 case REALPART_EXPR:
8306 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8307 return fold_convert_loc (loc, type, arg0);
8308 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8309 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8310 TREE_OPERAND (arg0, 1));
8311 if (TREE_CODE (arg0) == COMPLEX_CST)
8312 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8313 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8315 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8316 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8317 fold_build1_loc (loc, REALPART_EXPR, itype,
8318 TREE_OPERAND (arg0, 0)),
8319 fold_build1_loc (loc, REALPART_EXPR, itype,
8320 TREE_OPERAND (arg0, 1)));
8321 return fold_convert_loc (loc, type, tem);
8323 if (TREE_CODE (arg0) == CONJ_EXPR)
8325 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8326 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8327 TREE_OPERAND (arg0, 0));
8328 return fold_convert_loc (loc, type, tem);
8330 if (TREE_CODE (arg0) == CALL_EXPR)
8332 tree fn = get_callee_fndecl (arg0);
8333 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8334 switch (DECL_FUNCTION_CODE (fn))
8336 CASE_FLT_FN (BUILT_IN_CEXPI):
8337 fn = mathfn_built_in (type, BUILT_IN_COS);
8338 if (fn)
8339 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8340 break;
8342 default:
8343 break;
8346 return NULL_TREE;
8348 case IMAGPART_EXPR:
8349 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8350 return build_zero_cst (type);
8351 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8352 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8353 TREE_OPERAND (arg0, 0));
8354 if (TREE_CODE (arg0) == COMPLEX_CST)
8355 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8356 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8358 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8359 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8360 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8361 TREE_OPERAND (arg0, 0)),
8362 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8363 TREE_OPERAND (arg0, 1)));
8364 return fold_convert_loc (loc, type, tem);
8366 if (TREE_CODE (arg0) == CONJ_EXPR)
8368 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8369 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8370 return fold_convert_loc (loc, type, negate_expr (tem));
8372 if (TREE_CODE (arg0) == CALL_EXPR)
8374 tree fn = get_callee_fndecl (arg0);
8375 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8376 switch (DECL_FUNCTION_CODE (fn))
8378 CASE_FLT_FN (BUILT_IN_CEXPI):
8379 fn = mathfn_built_in (type, BUILT_IN_SIN);
8380 if (fn)
8381 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8382 break;
8384 default:
8385 break;
8388 return NULL_TREE;
8390 case INDIRECT_REF:
8391 /* Fold *&X to X if X is an lvalue. */
8392 if (TREE_CODE (op0) == ADDR_EXPR)
8394 tree op00 = TREE_OPERAND (op0, 0);
8395 if ((TREE_CODE (op00) == VAR_DECL
8396 || TREE_CODE (op00) == PARM_DECL
8397 || TREE_CODE (op00) == RESULT_DECL)
8398 && !TREE_READONLY (op00))
8399 return op00;
8401 return NULL_TREE;
8403 case VEC_UNPACK_LO_EXPR:
8404 case VEC_UNPACK_HI_EXPR:
8405 case VEC_UNPACK_FLOAT_LO_EXPR:
8406 case VEC_UNPACK_FLOAT_HI_EXPR:
8408 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8409 tree *elts;
8410 enum tree_code subcode;
8412 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8413 if (TREE_CODE (arg0) != VECTOR_CST)
8414 return NULL_TREE;
8416 elts = XALLOCAVEC (tree, nelts * 2);
8417 if (!vec_cst_ctor_to_array (arg0, elts))
8418 return NULL_TREE;
8420 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8421 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8422 elts += nelts;
8424 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8425 subcode = NOP_EXPR;
8426 else
8427 subcode = FLOAT_EXPR;
8429 for (i = 0; i < nelts; i++)
8431 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8432 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8433 return NULL_TREE;
8436 return build_vector (type, elts);
8439 case REDUC_MIN_EXPR:
8440 case REDUC_MAX_EXPR:
8441 case REDUC_PLUS_EXPR:
8443 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8444 tree *elts;
8445 enum tree_code subcode;
8447 if (TREE_CODE (op0) != VECTOR_CST)
8448 return NULL_TREE;
8450 elts = XALLOCAVEC (tree, nelts);
8451 if (!vec_cst_ctor_to_array (op0, elts))
8452 return NULL_TREE;
8454 switch (code)
8456 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8457 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8458 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8459 default: gcc_unreachable ();
8462 for (i = 1; i < nelts; i++)
8464 elts[0] = const_binop (subcode, elts[0], elts[i]);
8465 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8466 return NULL_TREE;
8467 elts[i] = build_zero_cst (TREE_TYPE (type));
8470 return build_vector (type, elts);
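/* E.g. (illustrative): REDUC_PLUS_EXPR on the constant vector
   { 1, 2, 3, 4 } folds to { 10, 0, 0, 0 }, the reduction result being
   accumulated into element 0. */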
8473 default:
8474 return NULL_TREE;
8475 } /* switch (code) */
8479 /* If the operation was a conversion do _not_ mark a resulting constant
8480 with TREE_OVERFLOW if the original constant was not. These conversions
8481 have implementation defined behavior and retaining the TREE_OVERFLOW
8482 flag here would confuse later passes such as VRP. */
8483 tree
8484 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8485 tree type, tree op0)
8487 tree res = fold_unary_loc (loc, code, type, op0);
8488 if (res
8489 && TREE_CODE (res) == INTEGER_CST
8490 && TREE_CODE (op0) == INTEGER_CST
8491 && CONVERT_EXPR_CODE_P (code))
8492 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8494 return res;
8497 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8498 operands OP0 and OP1. LOC is the location of the resulting expression.
8499 ARG0 and ARG1 are OP0 and OP1 with conversions stripped (STRIP_NOPS).
8500 Return the folded expression if folding is successful. Otherwise,
8501 return NULL_TREE. */
8502 static tree
8503 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8504 tree arg0, tree arg1, tree op0, tree op1)
8506 tree tem;
8508 /* We only do these simplifications if we are optimizing. */
8509 if (!optimize)
8510 return NULL_TREE;
8512 /* Check for things like (A || B) && (A || C). We can convert this
8513 to A || (B && C). Note that either operator can be any of the four
8514 truth and/or operations and the transformation will still be
8515 valid. Also note that we only care about order for the
8516 ANDIF and ORIF operators. If B contains side effects, this
8517 might change the truth-value of A. */
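/* E.g. (illustrative): (a || b) && (a || c) becomes a || (b && c),
   which is valid here because b is known to have no side effects. */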
8518 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8519 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8520 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8521 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8522 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8523 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8525 tree a00 = TREE_OPERAND (arg0, 0);
8526 tree a01 = TREE_OPERAND (arg0, 1);
8527 tree a10 = TREE_OPERAND (arg1, 0);
8528 tree a11 = TREE_OPERAND (arg1, 1);
8529 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8530 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8531 && (code == TRUTH_AND_EXPR
8532 || code == TRUTH_OR_EXPR));
8534 if (operand_equal_p (a00, a10, 0))
8535 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8536 fold_build2_loc (loc, code, type, a01, a11));
8537 else if (commutative && operand_equal_p (a00, a11, 0))
8538 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8539 fold_build2_loc (loc, code, type, a01, a10));
8540 else if (commutative && operand_equal_p (a01, a10, 0))
8541 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8542 fold_build2_loc (loc, code, type, a00, a11));
8544 /* This case is tricky because we must either have commutative
8545 operators or else A10 must not have side-effects. */
8547 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8548 && operand_equal_p (a01, a11, 0))
8549 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8550 fold_build2_loc (loc, code, type, a00, a10),
8551 a01);
8554 /* See if we can build a range comparison. */
8555 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8556 return tem;
8558 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8559 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8561 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8562 if (tem)
8563 return fold_build2_loc (loc, code, type, tem, arg1);
8566 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8567 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8569 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8570 if (tem)
8571 return fold_build2_loc (loc, code, type, arg0, tem);
8574 /* Check for the possibility of merging component references. If our
8575 lhs is another similar operation, try to merge its rhs with our
8576 rhs. Then try to merge our lhs and rhs. */
8577 if (TREE_CODE (arg0) == code
8578 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8579 TREE_OPERAND (arg0, 1), arg1)))
8580 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8582 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8583 return tem;
8585 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8586 && (code == TRUTH_AND_EXPR
8587 || code == TRUTH_ANDIF_EXPR
8588 || code == TRUTH_OR_EXPR
8589 || code == TRUTH_ORIF_EXPR))
8591 enum tree_code ncode, icode;
8593 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8594 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8595 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8597 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8598 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8599 We don't want to pack more than two leaves into a non-IF AND/OR
8600 expression.
8601 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8602 is not equal to IF-CODE, then we don't want to add the right-hand operand.
8603 If the inner right-hand side of the left-hand operand has
8604 side-effects, or isn't simple, then we can't add to it,
8605 as otherwise we might destroy the if-sequence. */
8606 if (TREE_CODE (arg0) == icode
8607 && simple_operand_p_2 (arg1)
8608 /* Needed for sequence points to handle trapping and
8609 side-effects. */
8610 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8612 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8613 arg1);
8614 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8615 tem);
8617 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8618 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8619 else if (TREE_CODE (arg1) == icode
8620 && simple_operand_p_2 (arg0)
8621 /* Needed for sequence points to handle trapping and
8622 side-effects. */
8623 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8625 tem = fold_build2_loc (loc, ncode, type,
8626 arg0, TREE_OPERAND (arg1, 0));
8627 return fold_build2_loc (loc, icode, type, tem,
8628 TREE_OPERAND (arg1, 1));
8630 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8631 into (A OR B).
8632 For sequence point consistency, we need to check for trapping
8633 and side-effects. */
8634 else if (code == icode && simple_operand_p_2 (arg0)
8635 && simple_operand_p_2 (arg1))
8636 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8639 return NULL_TREE;
8642 /* Fold a binary expression of code CODE and type TYPE with operands
8643 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8644 Return the folded expression if folding is successful. Otherwise,
8645 return NULL_TREE. */
8647 static tree
8648 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8650 enum tree_code compl_code;
8652 if (code == MIN_EXPR)
8653 compl_code = MAX_EXPR;
8654 else if (code == MAX_EXPR)
8655 compl_code = MIN_EXPR;
8656 else
8657 gcc_unreachable ();
8659 /* MIN (MAX (a, b), b) == b. */
8660 if (TREE_CODE (op0) == compl_code
8661 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8662 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8664 /* MIN (MAX (b, a), b) == b. */
8665 if (TREE_CODE (op0) == compl_code
8666 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8667 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8668 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8670 /* MIN (a, MAX (a, b)) == a. */
8671 if (TREE_CODE (op1) == compl_code
8672 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8673 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8674 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8676 /* MIN (a, MAX (b, a)) == a. */
8677 if (TREE_CODE (op1) == compl_code
8678 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8679 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8680 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8682 return NULL_TREE;
8685 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8686 by changing CODE to reduce the magnitude of constants involved in
8687 ARG0 of the comparison.
8688 Returns a canonicalized comparison tree if a simplification was
8689 possible, otherwise returns NULL_TREE.
8690 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8691 valid if signed overflow is undefined. */
8693 static tree
8694 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8695 tree arg0, tree arg1,
8696 bool *strict_overflow_p)
8698 enum tree_code code0 = TREE_CODE (arg0);
8699 tree t, cst0 = NULL_TREE;
8700 int sgn0;
8701 bool swap = false;
8703 /* Match A +- CST code arg1 and CST code arg1. We can change the
8704 first form only if overflow is undefined. */
8705 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8706 /* In principle pointers also have undefined overflow behavior,
8707 but that causes problems elsewhere. */
8708 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8709 && (code0 == MINUS_EXPR
8710 || code0 == PLUS_EXPR)
8711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8712 || code0 == INTEGER_CST))
8713 return NULL_TREE;
8715 /* Identify the constant in arg0 and its sign. */
8716 if (code0 == INTEGER_CST)
8717 cst0 = arg0;
8718 else
8719 cst0 = TREE_OPERAND (arg0, 1);
8720 sgn0 = tree_int_cst_sgn (cst0);
8722 /* Overflowed constants and zero will cause problems. */
8723 if (integer_zerop (cst0)
8724 || TREE_OVERFLOW (cst0))
8725 return NULL_TREE;
8727 /* See if we can reduce the magnitude of the constant in
8728 arg0 by changing the comparison code. */
8729 if (code0 == INTEGER_CST)
8731 /* CST <= arg1 -> CST-1 < arg1. */
8732 if (code == LE_EXPR && sgn0 == 1)
8733 code = LT_EXPR;
8734 /* -CST < arg1 -> -CST-1 <= arg1. */
8735 else if (code == LT_EXPR && sgn0 == -1)
8736 code = LE_EXPR;
8737 /* CST > arg1 -> CST-1 >= arg1. */
8738 else if (code == GT_EXPR && sgn0 == 1)
8739 code = GE_EXPR;
8740 /* -CST >= arg1 -> -CST-1 > arg1. */
8741 else if (code == GE_EXPR && sgn0 == -1)
8742 code = GT_EXPR;
8743 else
8744 return NULL_TREE;
8745 /* arg1 code' CST' might be more canonical. */
8746 swap = true;
8748 else
8750 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8751 if (code == LT_EXPR
8752 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8753 code = LE_EXPR;
8754 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8755 else if (code == GT_EXPR
8756 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8757 code = GE_EXPR;
8758 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8759 else if (code == LE_EXPR
8760 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8761 code = LT_EXPR;
8762 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8763 else if (code == GE_EXPR
8764 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8765 code = GT_EXPR;
8766 else
8767 return NULL_TREE;
8768 *strict_overflow_p = true;
8771 /* Now build the constant reduced in magnitude. But not if that
8772 would produce one outside of its type's range. */
8773 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8774 && ((sgn0 == 1
8775 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8776 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8777 || (sgn0 == -1
8778 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8779 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8780 /* We cannot swap the comparison here as that would cause us to
8781 endlessly recurse. */
8782 return NULL_TREE;
8784 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8785 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8786 if (code0 != INTEGER_CST)
8787 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8788 t = fold_convert (TREE_TYPE (arg1), t);
8790 /* If swapping might yield a more canonical form, do so. */
8791 if (swap)
8792 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8793 else
8794 return fold_build2_loc (loc, code, type, t, arg1);
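/* Worked instances (illustrative, signed overflow assumed undefined):
   x + 2 > y becomes x + 1 >= y, reducing the constant's magnitude;
   3 <= y first becomes 2 < y and is then swapped to y > 2 for
   canonical operand order. */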
8797 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8798 overflow further. Try to decrease the magnitude of constants involved
8799 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8800 and put sole constants at the second argument position.
8801 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8803 static tree
8804 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8805 tree arg0, tree arg1)
8807 tree t;
8808 bool strict_overflow_p;
8809 const char * const warnmsg = G_("assuming signed overflow does not occur "
8810 "when reducing constant in comparison");
8812 /* Try canonicalization by simplifying arg0. */
8813 strict_overflow_p = false;
8814 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8815 &strict_overflow_p);
8816 if (t)
8818 if (strict_overflow_p)
8819 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8820 return t;
8823 /* Try canonicalization by simplifying arg1 using the swapped
8824 comparison. */
8825 code = swap_tree_comparison (code);
8826 strict_overflow_p = false;
8827 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8828 &strict_overflow_p);
8829 if (t && strict_overflow_p)
8830 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8831 return t;
8834 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8835 space. This is used to avoid issuing overflow warnings for
8836 expressions like &p->x which cannot wrap. */
8838 static bool
8839 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8841 double_int di_offset, total;
8843 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8844 return true;
8846 if (bitpos < 0)
8847 return true;
8849 if (offset == NULL_TREE)
8850 di_offset = double_int_zero;
8851 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8852 return true;
8853 else
8854 di_offset = TREE_INT_CST (offset);
8856 bool overflow;
8857 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8858 total = di_offset.add_with_sign (units, true, &overflow);
8859 if (overflow)
8860 return true;
8862 if (total.high != 0)
8863 return true;
8865 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8866 if (size <= 0)
8867 return true;
8869 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8870 array. */
8871 if (TREE_CODE (base) == ADDR_EXPR)
8873 HOST_WIDE_INT base_size;
8875 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8876 if (base_size > 0 && size < base_size)
8877 size = base_size;
8880 return total.low > (unsigned HOST_WIDE_INT) size;
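/* For instance (illustrative): given struct S { int a; int b; } *p,
   the address &p->b has a bit position within the size of S, so this
   returns false and no spurious overflow warning is issued. */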
8883 /* Subroutine of fold_binary. This routine performs all of the
8884 transformations that are common to the equality/inequality
8885 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8886 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Code other than
8887 fold_binary itself should call fold_binary, not this routine. Fold a comparison with
8888 tree code CODE and type TYPE with operands OP0 and OP1. Return
8889 the folded comparison or NULL_TREE. */
8891 static tree
8892 fold_comparison (location_t loc, enum tree_code code, tree type,
8893 tree op0, tree op1)
8895 tree arg0, arg1, tem;
8897 arg0 = op0;
8898 arg1 = op1;
8900 STRIP_SIGN_NOPS (arg0);
8901 STRIP_SIGN_NOPS (arg1);
8903 tem = fold_relational_const (code, type, arg0, arg1);
8904 if (tem != NULL_TREE)
8905 return tem;
8907 /* If one arg is a real or integer constant, put it last. */
8908 if (tree_swap_operands_p (arg0, arg1, true))
8909 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8911 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
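/* E.g. (illustrative): x + 5 < 10 becomes x < 5. If C2 -+ C1
   overflows, the comparison degenerates to a constant; e.g.
   x - 1 < INT_MAX becomes true for signed x when signed overflow is
   assumed undefined. */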
8912 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8913 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8914 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8915 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8916 && (TREE_CODE (arg1) == INTEGER_CST
8917 && !TREE_OVERFLOW (arg1)))
8919 tree const1 = TREE_OPERAND (arg0, 1);
8920 tree const2 = arg1;
8921 tree variable = TREE_OPERAND (arg0, 0);
8922 tree lhs;
8923 int lhs_add;
8924 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8926 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8927 TREE_TYPE (arg1), const2, const1);
8929 /* If the constant operation overflowed, this can be
8930 simplified as a comparison against INT_MAX/INT_MIN. */
8931 if (TREE_CODE (lhs) == INTEGER_CST
8932 && TREE_OVERFLOW (lhs)
8933 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8935 int const1_sgn = tree_int_cst_sgn (const1);
8936 enum tree_code code2 = code;
8938 /* Get the sign of the constant on the lhs if the
8939 operation were VARIABLE + CONST1. */
8940 if (TREE_CODE (arg0) == MINUS_EXPR)
8941 const1_sgn = -const1_sgn;
8943 /* The sign of the constant determines if we overflowed
8944 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8945 Canonicalize to the INT_MIN overflow by swapping the comparison
8946 if necessary. */
8947 if (const1_sgn == -1)
8948 code2 = swap_tree_comparison (code);
8950 /* We can now look at the canonicalized case
8951 VARIABLE + 1 CODE2 INT_MIN
8952 and decide on the result. */
8953 if (code2 == LT_EXPR
8954 || code2 == LE_EXPR
8955 || code2 == EQ_EXPR)
8956 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8957 else if (code2 == NE_EXPR
8958 || code2 == GE_EXPR
8959 || code2 == GT_EXPR)
8960 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8963 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8964 && (TREE_CODE (lhs) != INTEGER_CST
8965 || !TREE_OVERFLOW (lhs)))
8967 if (code != EQ_EXPR && code != NE_EXPR)
8968 fold_overflow_warning ("assuming signed overflow does not occur "
8969 "when changing X +- C1 cmp C2 to "
8970 "X cmp C1 +- C2",
8971 WARN_STRICT_OVERFLOW_COMPARISON);
8972 return fold_build2_loc (loc, code, type, variable, lhs);
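/* Illustrative sketch, not part of the original source: the transformation
   above at the source level, assuming 32-bit signed int so that signed
   overflow is undefined.  Function names are hypothetical.  */
#if 0
#include <limits.h>
static int ex1 (int x) { return x + 10 < 20; }      /* folds to: x < 10 */
static int ex2 (int x) { return x - 1 <= INT_MAX; } /* C2 + C1 overflows
                                                       INT_MAX: folds to 1.  */
#endif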
8976 /* For comparisons of pointers we can decompose it to a compile time
8977 comparison of the base objects and the offsets into the object.
8978 This requires at least one operand being an ADDR_EXPR or a
8979 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8980 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8981 && (TREE_CODE (arg0) == ADDR_EXPR
8982 || TREE_CODE (arg1) == ADDR_EXPR
8983 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8984 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8986 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8987 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8988 enum machine_mode mode;
8989 int volatilep, unsignedp;
8990 bool indirect_base0 = false, indirect_base1 = false;
8992 /* Get base and offset for the access. Strip ADDR_EXPR for
8993 get_inner_reference, but put it back by stripping INDIRECT_REF
8994 off the base object if possible. indirect_baseN will be true
8995 if baseN is not an address but refers to the object itself. */
8996 base0 = arg0;
8997 if (TREE_CODE (arg0) == ADDR_EXPR)
8999 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9000 &bitsize, &bitpos0, &offset0, &mode,
9001 &unsignedp, &volatilep, false);
9002 if (TREE_CODE (base0) == INDIRECT_REF)
9003 base0 = TREE_OPERAND (base0, 0);
9004 else
9005 indirect_base0 = true;
9007 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9009 base0 = TREE_OPERAND (arg0, 0);
9010 STRIP_SIGN_NOPS (base0);
9011 if (TREE_CODE (base0) == ADDR_EXPR)
9013 base0 = TREE_OPERAND (base0, 0);
9014 indirect_base0 = true;
9016 offset0 = TREE_OPERAND (arg0, 1);
9017 if (host_integerp (offset0, 0))
9019 HOST_WIDE_INT off = size_low_cst (offset0);
9020 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9021 * BITS_PER_UNIT)
9022 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9024 bitpos0 = off * BITS_PER_UNIT;
9025 offset0 = NULL_TREE;
9030 base1 = arg1;
9031 if (TREE_CODE (arg1) == ADDR_EXPR)
9033 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9034 &bitsize, &bitpos1, &offset1, &mode,
9035 &unsignedp, &volatilep, false);
9036 if (TREE_CODE (base1) == INDIRECT_REF)
9037 base1 = TREE_OPERAND (base1, 0);
9038 else
9039 indirect_base1 = true;
9041 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9043 base1 = TREE_OPERAND (arg1, 0);
9044 STRIP_SIGN_NOPS (base1);
9045 if (TREE_CODE (base1) == ADDR_EXPR)
9047 base1 = TREE_OPERAND (base1, 0);
9048 indirect_base1 = true;
9050 offset1 = TREE_OPERAND (arg1, 1);
9051 if (host_integerp (offset1, 0))
9053 HOST_WIDE_INT off = size_low_cst (offset1);
9054 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9055 * BITS_PER_UNIT)
9056 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9058 bitpos1 = off * BITS_PER_UNIT;
9059 offset1 = NULL_TREE;
9064 /* A local variable can never be pointed to by
9065 the default SSA name of an incoming parameter. */
9066 if ((TREE_CODE (arg0) == ADDR_EXPR
9067 && indirect_base0
9068 && TREE_CODE (base0) == VAR_DECL
9069 && auto_var_in_fn_p (base0, current_function_decl)
9070 && !indirect_base1
9071 && TREE_CODE (base1) == SSA_NAME
9072 && SSA_NAME_IS_DEFAULT_DEF (base1)
9073 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9074 || (TREE_CODE (arg1) == ADDR_EXPR
9075 && indirect_base1
9076 && TREE_CODE (base1) == VAR_DECL
9077 && auto_var_in_fn_p (base1, current_function_decl)
9078 && !indirect_base0
9079 && TREE_CODE (base0) == SSA_NAME
9080 && SSA_NAME_IS_DEFAULT_DEF (base0)
9081 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9083 if (code == NE_EXPR)
9084 return constant_boolean_node (1, type);
9085 else if (code == EQ_EXPR)
9086 return constant_boolean_node (0, type);
9088 /* If we have equivalent bases we might be able to simplify. */
9089 else if (indirect_base0 == indirect_base1
9090 && operand_equal_p (base0, base1, 0))
9092 /* We can fold this expression to a constant if the non-constant
9093 offset parts are equal. */
9094 if ((offset0 == offset1
9095 || (offset0 && offset1
9096 && operand_equal_p (offset0, offset1, 0)))
9097 && (code == EQ_EXPR
9098 || code == NE_EXPR
9099 || (indirect_base0 && DECL_P (base0))
9100 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9103 if (code != EQ_EXPR
9104 && code != NE_EXPR
9105 && bitpos0 != bitpos1
9106 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9107 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9108 fold_overflow_warning (("assuming pointer wraparound does not "
9109 "occur when comparing P +- C1 with "
9110 "P +- C2"),
9111 WARN_STRICT_OVERFLOW_CONDITIONAL);
9113 switch (code)
9115 case EQ_EXPR:
9116 return constant_boolean_node (bitpos0 == bitpos1, type);
9117 case NE_EXPR:
9118 return constant_boolean_node (bitpos0 != bitpos1, type);
9119 case LT_EXPR:
9120 return constant_boolean_node (bitpos0 < bitpos1, type);
9121 case LE_EXPR:
9122 return constant_boolean_node (bitpos0 <= bitpos1, type);
9123 case GE_EXPR:
9124 return constant_boolean_node (bitpos0 >= bitpos1, type);
9125 case GT_EXPR:
9126 return constant_boolean_node (bitpos0 > bitpos1, type);
9127 default:;
9130 /* We can simplify the comparison to a comparison of the variable
9131 offset parts if the constant offset parts are equal.
9132 Be careful to use signed sizetype here because otherwise we
9133 mess with array offsets in the wrong way. This is possible
9134 because pointer arithmetic is restricted to retain within an
9135 object and overflow on pointer differences is undefined as of
9136 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9137 else if (bitpos0 == bitpos1
9138 && ((code == EQ_EXPR || code == NE_EXPR)
9139 || (indirect_base0 && DECL_P (base0))
9140 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9142 /* By converting to signed sizetype we cover middle-end pointer
9143 arithmetic which operates on unsigned pointer types of size
9144 type size and ARRAY_REF offsets which are properly sign or
9145 zero extended from their type in case it is narrower than
9146 sizetype. */
9147 if (offset0 == NULL_TREE)
9148 offset0 = build_int_cst (ssizetype, 0);
9149 else
9150 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9151 if (offset1 == NULL_TREE)
9152 offset1 = build_int_cst (ssizetype, 0);
9153 else
9154 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9156 if (code != EQ_EXPR
9157 && code != NE_EXPR
9158 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9159 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9160 fold_overflow_warning (("assuming pointer wraparound does not "
9161 "occur when comparing P +- C1 with "
9162 "P +- C2"),
9163 WARN_STRICT_OVERFLOW_COMPARISON);
9165 return fold_build2_loc (loc, code, type, offset0, offset1);
9168 /* For non-equal bases we can simplify if they are addresses
9169 of local binding decls or constants. */
9170 else if (indirect_base0 && indirect_base1
9171 /* We know that !operand_equal_p (base0, base1, 0)
9172 because the if condition was false. But make
9173 sure two decls are not the same. */
9174 && base0 != base1
9175 && TREE_CODE (arg0) == ADDR_EXPR
9176 && TREE_CODE (arg1) == ADDR_EXPR
9177 && (((TREE_CODE (base0) == VAR_DECL
9178 || TREE_CODE (base0) == PARM_DECL)
9179 && (targetm.binds_local_p (base0)
9180 || CONSTANT_CLASS_P (base1)))
9181 || CONSTANT_CLASS_P (base0))
9182 && (((TREE_CODE (base1) == VAR_DECL
9183 || TREE_CODE (base1) == PARM_DECL)
9184 && (targetm.binds_local_p (base1)
9185 || CONSTANT_CLASS_P (base0)))
9186 || CONSTANT_CLASS_P (base1)))
9188 if (code == EQ_EXPR)
9189 return omit_two_operands_loc (loc, type, boolean_false_node,
9190 arg0, arg1);
9191 else if (code == NE_EXPR)
9192 return omit_two_operands_loc (loc, type, boolean_true_node,
9193 arg0, arg1);
9195 /* For equal offsets we can simplify to a comparison of the
9196 base addresses. */
9197 else if (bitpos0 == bitpos1
9198 && (indirect_base0
9199 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9200 && (indirect_base1
9201 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9202 && ((offset0 == offset1)
9203 || (offset0 && offset1
9204 && operand_equal_p (offset0, offset1, 0))))
9206 if (indirect_base0)
9207 base0 = build_fold_addr_expr_loc (loc, base0);
9208 if (indirect_base1)
9209 base1 = build_fold_addr_expr_loc (loc, base1);
9210 return fold_build2_loc (loc, code, type, base0, base1);
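/* Illustrative sketch, not part of the original source: examples of the
   base/offset decomposition above, with hypothetical names.  With a
   common base the comparison reduces to comparing constant bit positions
   or variable offsets; with distinct local decls, equality is constant.  */
#if 0
struct pair { int a; int b; };
static struct pair u, v;
static int p1 (void) { return &u.a == &u.b; }  /* same base: folds to 0 */
static int p2 (void) { return &u.a < &u.b; }   /* bitpos compare: folds to 1 */
static int p3 (void) { return &u == &v; }      /* distinct decls: folds to 0 */
static int p4 (int *p, int i, int j)
{
  return p + i == p + j;                       /* equal bases: folds to i == j */
}
#endif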
9214 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9215 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9216 the resulting offset is smaller in absolute value than the
9217 original one and has the same sign. */
9218 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9219 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9220 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9221 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9222 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9223 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9224 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9226 tree const1 = TREE_OPERAND (arg0, 1);
9227 tree const2 = TREE_OPERAND (arg1, 1);
9228 tree variable1 = TREE_OPERAND (arg0, 0);
9229 tree variable2 = TREE_OPERAND (arg1, 0);
9230 tree cst;
9231 const char * const warnmsg = G_("assuming signed overflow does not "
9232 "occur when combining constants around "
9233 "a comparison");
9235 /* Put the constant on the side where it doesn't overflow and is
9236 of lower absolute value and of the same sign as before. */
9237 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9238 ? MINUS_EXPR : PLUS_EXPR,
9239 const2, const1);
9240 if (!TREE_OVERFLOW (cst)
9241 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9242 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9244 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9245 return fold_build2_loc (loc, code, type,
9246 variable1,
9247 fold_build2_loc (loc, TREE_CODE (arg1),
9248 TREE_TYPE (arg1),
9249 variable2, cst));
9252 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9253 ? MINUS_EXPR : PLUS_EXPR,
9254 const1, const2);
9255 if (!TREE_OVERFLOW (cst)
9256 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9257 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9259 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9260 return fold_build2_loc (loc, code, type,
9261 fold_build2_loc (loc, TREE_CODE (arg0),
9262 TREE_TYPE (arg0),
9263 variable1, cst),
9264 variable2);
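/* Illustrative sketch, not part of the original source: combining the two
   constants on one side.  The rewrite is accepted only when the combined
   constant does not overflow and is smaller in magnitude, with the same
   sign, than the constant it replaces, so no new overflow can appear.  */
#if 0
static int c1 (int x, int y) { return x + 5 < y + 7; } /* folds to: x < y + 2 */
static int c2 (int x, int y) { return x - 8 < y - 3; } /* folds to: x - 5 < y */
#endif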
9268 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9269 signed arithmetic case. That form is created by the compiler
9270 often enough for folding it to be of value. One example is in
9271 computing loop trip counts after Operator Strength Reduction. */
9272 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9273 && TREE_CODE (arg0) == MULT_EXPR
9274 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9275 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9276 && integer_zerop (arg1))
9278 tree const1 = TREE_OPERAND (arg0, 1);
9279 tree const2 = arg1; /* zero */
9280 tree variable1 = TREE_OPERAND (arg0, 0);
9281 enum tree_code cmp_code = code;
9283 /* Handle unfolded multiplication by zero. */
9284 if (integer_zerop (const1))
9285 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9287 fold_overflow_warning (("assuming signed overflow does not occur when "
9288 "eliminating multiplication in comparison "
9289 "with zero"),
9290 WARN_STRICT_OVERFLOW_COMPARISON);
9292 /* If const1 is negative we swap the sense of the comparison. */
9293 if (tree_int_cst_sgn (const1) < 0)
9294 cmp_code = swap_tree_comparison (cmp_code);
9296 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
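/* Illustrative sketch, not part of the original source: eliminating the
   multiplication, valid for signed X because overflow is undefined; a
   negative multiplier swaps the sense of the comparison.  */
#if 0
static int m1 (int x) { return x * 4 > 0; }  /* folds to: x > 0 */
static int m2 (int x) { return x * -4 > 0; } /* folds to: x < 0 */
#endif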
9299 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9300 if (tem)
9301 return tem;
9303 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9305 tree targ0 = strip_float_extensions (arg0);
9306 tree targ1 = strip_float_extensions (arg1);
9307 tree newtype = TREE_TYPE (targ0);
9309 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9310 newtype = TREE_TYPE (targ1);
9312 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9313 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9314 return fold_build2_loc (loc, code, type,
9315 fold_convert_loc (loc, newtype, targ0),
9316 fold_convert_loc (loc, newtype, targ1));
9318 /* (-a) CMP (-b) -> b CMP a */
9319 if (TREE_CODE (arg0) == NEGATE_EXPR
9320 && TREE_CODE (arg1) == NEGATE_EXPR)
9321 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9322 TREE_OPERAND (arg0, 0));
9324 if (TREE_CODE (arg1) == REAL_CST)
9326 REAL_VALUE_TYPE cst;
9327 cst = TREE_REAL_CST (arg1);
9329 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9330 if (TREE_CODE (arg0) == NEGATE_EXPR)
9331 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9332 TREE_OPERAND (arg0, 0),
9333 build_real (TREE_TYPE (arg1),
9334 real_value_negate (&cst)));
9336 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9337 /* a CMP (-0) -> a CMP 0 */
9338 if (REAL_VALUE_MINUS_ZERO (cst))
9339 return fold_build2_loc (loc, code, type, arg0,
9340 build_real (TREE_TYPE (arg1), dconst0));
9342 /* x != NaN is always true, other ops are always false. */
9343 if (REAL_VALUE_ISNAN (cst)
9344 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9346 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9347 return omit_one_operand_loc (loc, type, tem, arg0);
9350 /* Fold comparisons against infinity. */
9351 if (REAL_VALUE_ISINF (cst)
9352 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9354 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9355 if (tem != NULL_TREE)
9356 return tem;
9360 /* If this is a comparison of a real constant with a PLUS_EXPR
9361 or a MINUS_EXPR of a real constant, we can convert it into a
9362 comparison with a revised real constant, provided that no overflow
9363 occurs and unsafe_math_optimizations are enabled. */
9364 if (flag_unsafe_math_optimizations
9365 && TREE_CODE (arg1) == REAL_CST
9366 && (TREE_CODE (arg0) == PLUS_EXPR
9367 || TREE_CODE (arg0) == MINUS_EXPR)
9368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9369 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9370 ? MINUS_EXPR : PLUS_EXPR,
9371 arg1, TREE_OPERAND (arg0, 1)))
9372 && !TREE_OVERFLOW (tem))
9373 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9375 /* Likewise, we can simplify a comparison of a real constant with
9376 a MINUS_EXPR whose first operand is also a real constant, i.e.
9377 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9378 floating-point types only if -fassociative-math is set. */
9379 if (flag_associative_math
9380 && TREE_CODE (arg1) == REAL_CST
9381 && TREE_CODE (arg0) == MINUS_EXPR
9382 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9383 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9384 arg1))
9385 && !TREE_OVERFLOW (tem))
9386 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9387 TREE_OPERAND (arg0, 1), tem);
9389 /* Fold comparisons against built-in math functions. */
9390 if (TREE_CODE (arg1) == REAL_CST
9391 && flag_unsafe_math_optimizations
9392 && ! flag_errno_math)
9394 enum built_in_function fcode = builtin_mathfn_code (arg0);
9396 if (fcode != END_BUILTINS)
9398 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9399 if (tem != NULL_TREE)
9400 return tem;
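/* Illustrative sketch, not part of the original source: a few of the
   floating-point comparison folds above, assuming IEEE double and
   hypothetical function names.  */
#if 0
static int f1 (double a, double b) { return -a < -b; } /* folds to: b < a */
static int f2 (double a) { return a < -0.0; }          /* folds to: a < 0.0 */
static int f3 (float a, float b)
{
  return (double) a < (double) b;                      /* folds to: a < b */
}
#endif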
9405 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9406 && CONVERT_EXPR_P (arg0))
9408 /* If we are widening one operand of an integer comparison,
9409 see if the other operand is similarly being widened. Perhaps we
9410 can do the comparison in the narrower type. */
9411 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9412 if (tem)
9413 return tem;
9415 /* Or if we are changing signedness. */
9416 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9417 if (tem)
9418 return tem;
9421 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9422 constant, we can simplify it. */
9423 if (TREE_CODE (arg1) == INTEGER_CST
9424 && (TREE_CODE (arg0) == MIN_EXPR
9425 || TREE_CODE (arg0) == MAX_EXPR)
9426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9428 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9429 if (tem)
9430 return tem;
9433 /* Simplify comparison of something with itself. (For IEEE
9434 floating-point, we can only do some of these simplifications.) */
9435 if (operand_equal_p (arg0, arg1, 0))
9437 switch (code)
9439 case EQ_EXPR:
9440 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9441 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9442 return constant_boolean_node (1, type);
9443 break;
9445 case GE_EXPR:
9446 case LE_EXPR:
9447 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9448 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9449 return constant_boolean_node (1, type);
9450 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9452 case NE_EXPR:
9453 /* For NE, we can only do this simplification if the type is
9454 integral or we don't honor IEEE floating point NaNs. */
9455 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9456 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9457 break;
9458 /* ... fall through ... */
9459 case GT_EXPR:
9460 case LT_EXPR:
9461 return constant_boolean_node (0, type);
9462 default:
9463 gcc_unreachable ();
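/* Illustrative sketch, not part of the original source: self-comparison
   folds; only some of them survive IEEE NaNs.  */
#if 0
static int s1 (int x)    { return x >= x; } /* folds to: 1 */
static int s2 (double d) { return d >= d; } /* folds to: d == d (false for NaN) */
static int s3 (double d) { return d < d; }  /* folds to: 0, NaN or not */
#endif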
9467 /* If we are comparing an expression that just has comparisons
9468 of two integer values, arithmetic expressions of those comparisons,
9469 and constants, we can simplify it. There are only three cases
9470 to check: the two values can either be equal, the first can be
9471 greater, or the second can be greater. Fold the expression for
9472 those three values. Since each value must be 0 or 1, we have
9473 eight possibilities, each of which corresponds to the constant 0
9474 or 1 or one of the six possible comparisons.
9476 This handles common cases like (a > b) == 0 but also handles
9477 expressions like ((x > y) - (y > x)) > 0, which supposedly
9478 occur in macroized code. */
9480 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9482 tree cval1 = 0, cval2 = 0;
9483 int save_p = 0;
9485 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9486 /* Don't handle degenerate cases here; they should already
9487 have been handled anyway. */
9488 && cval1 != 0 && cval2 != 0
9489 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9490 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9491 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9492 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9493 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9494 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9495 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9497 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9498 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9500 /* We can't just pass T to eval_subst in case cval1 or cval2
9501 was the same as ARG1. */
9503 tree high_result
9504 = fold_build2_loc (loc, code, type,
9505 eval_subst (loc, arg0, cval1, maxval,
9506 cval2, minval),
9507 arg1);
9508 tree equal_result
9509 = fold_build2_loc (loc, code, type,
9510 eval_subst (loc, arg0, cval1, maxval,
9511 cval2, maxval),
9512 arg1);
9513 tree low_result
9514 = fold_build2_loc (loc, code, type,
9515 eval_subst (loc, arg0, cval1, minval,
9516 cval2, maxval),
9517 arg1);
9519 /* All three of these results should be 0 or 1. Confirm they are.
9520 Then use those values to select the proper code to use. */
9522 if (TREE_CODE (high_result) == INTEGER_CST
9523 && TREE_CODE (equal_result) == INTEGER_CST
9524 && TREE_CODE (low_result) == INTEGER_CST)
9526 /* Make a 3-bit mask with the high-order bit being the
9527 value for `>', the next for `=', and the low for `<'. */
9528 switch ((integer_onep (high_result) * 4)
9529 + (integer_onep (equal_result) * 2)
9530 + integer_onep (low_result))
9532 case 0:
9533 /* Always false. */
9534 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9535 case 1:
9536 code = LT_EXPR;
9537 break;
9538 case 2:
9539 code = EQ_EXPR;
9540 break;
9541 case 3:
9542 code = LE_EXPR;
9543 break;
9544 case 4:
9545 code = GT_EXPR;
9546 break;
9547 case 5:
9548 code = NE_EXPR;
9549 break;
9550 case 6:
9551 code = GE_EXPR;
9552 break;
9553 case 7:
9554 /* Always true. */
9555 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9558 if (save_p)
9560 tem = save_expr (build2 (code, type, cval1, cval2));
9561 SET_EXPR_LOCATION (tem, loc);
9562 return tem;
9564 return fold_build2_loc (loc, code, type, cval1, cval2);
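/* Illustrative sketch, not part of the original source: the three trial
   substitutions classify the expression for a > b, a == b and a < b, and
   the resulting 3-bit mask selects the replacement comparison.  */
#if 0
static int t1 (int a, int b) { return (a > b) == 0; }            /* mask 011: a <= b */
static int t2 (int a, int b) { return ((a > b) - (b > a)) > 0; } /* mask 100: a > b */
#endif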
9569 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9570 into a single range test. */
9571 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9572 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9573 && TREE_CODE (arg1) == INTEGER_CST
9574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9575 && !integer_zerop (TREE_OPERAND (arg0, 1))
9576 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9577 && !TREE_OVERFLOW (arg1))
9579 tem = fold_div_compare (loc, code, type, arg0, arg1);
9580 if (tem != NULL_TREE)
9581 return tem;
9584 /* Fold ~X op ~Y as Y op X. */
9585 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9586 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9588 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9589 return fold_build2_loc (loc, code, type,
9590 fold_convert_loc (loc, cmp_type,
9591 TREE_OPERAND (arg1, 0)),
9592 TREE_OPERAND (arg0, 0));
9595 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9596 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9597 && TREE_CODE (arg1) == INTEGER_CST)
9599 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9600 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9601 TREE_OPERAND (arg0, 0),
9602 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9603 fold_convert_loc (loc, cmp_type, arg1)));
9606 return NULL_TREE;
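/* Illustrative sketch, not part of the original source: the remaining
   folds of fold_comparison at the source level, assuming 32-bit int.  */
#if 0
static int b1 (int x, int y) { return ~x < ~y; } /* folds to: y < x */
static int b2 (int x) { return ~x == 5; }        /* folds to: x == -6 */
static int b3 (int x) { return x / 4 == 3; }     /* range test: 12 <= x && x <= 15 */
#endif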
9610 /* Subroutine of fold_binary. Optimize complex multiplications of the
9611 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9612 argument EXPR represents the expression "z" of type TYPE. */
9614 static tree
9615 fold_mult_zconjz (location_t loc, tree type, tree expr)
9617 tree itype = TREE_TYPE (type);
9618 tree rpart, ipart, tem;
9620 if (TREE_CODE (expr) == COMPLEX_EXPR)
9622 rpart = TREE_OPERAND (expr, 0);
9623 ipart = TREE_OPERAND (expr, 1);
9625 else if (TREE_CODE (expr) == COMPLEX_CST)
9627 rpart = TREE_REALPART (expr);
9628 ipart = TREE_IMAGPART (expr);
9630 else
9632 expr = save_expr (expr);
9633 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9634 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9637 rpart = save_expr (rpart);
9638 ipart = save_expr (ipart);
9639 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9640 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9641 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9642 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9643 build_zero_cst (itype));
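/* Illustrative sketch, not part of the original source: the identity
   being exploited, shown with C99 complex arithmetic.  */
#if 0
#include <complex.h>
static double complex zc (double complex z)
{
  /* z * conj(z) == re*re + im*im, with a zero imaginary part.  */
  return z * conj (z);
}
#endif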
9647 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9648 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9649 guarantees that P and N have the same least significant log2(M) bits.
9650 N is not otherwise constrained. In particular, N is not normalized to
9651 0 <= N < M as is common. In general, the precise value of P is unknown.
9652 M is chosen as large as possible such that constant N can be determined.
9654 Returns M and sets *RESIDUE to N.
9656 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9657 account. This is not always possible due to PR 35705.
9660 static unsigned HOST_WIDE_INT
9661 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9662 bool allow_func_align)
9664 enum tree_code code;
9666 *residue = 0;
9668 code = TREE_CODE (expr);
9669 if (code == ADDR_EXPR)
9671 unsigned int bitalign;
9672 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9673 *residue /= BITS_PER_UNIT;
9674 return bitalign / BITS_PER_UNIT;
9676 else if (code == POINTER_PLUS_EXPR)
9678 tree op0, op1;
9679 unsigned HOST_WIDE_INT modulus;
9680 enum tree_code inner_code;
9682 op0 = TREE_OPERAND (expr, 0);
9683 STRIP_NOPS (op0);
9684 modulus = get_pointer_modulus_and_residue (op0, residue,
9685 allow_func_align);
9687 op1 = TREE_OPERAND (expr, 1);
9688 STRIP_NOPS (op1);
9689 inner_code = TREE_CODE (op1);
9690 if (inner_code == INTEGER_CST)
9692 *residue += TREE_INT_CST_LOW (op1);
9693 return modulus;
9695 else if (inner_code == MULT_EXPR)
9697 op1 = TREE_OPERAND (op1, 1);
9698 if (TREE_CODE (op1) == INTEGER_CST)
9700 unsigned HOST_WIDE_INT align;
9702 /* Compute the greatest power-of-2 divisor of op1. */
9703 align = TREE_INT_CST_LOW (op1);
9704 align &= -align;
9706 /* If align is non-zero and less than *modulus, replace
9707 *modulus with align. If align is 0, then either op1 is 0
9708 or the greatest power-of-2 divisor of op1 doesn't fit in an
9709 unsigned HOST_WIDE_INT. In either case, no additional
9710 constraint is imposed. */
9711 if (align)
9712 modulus = MIN (modulus, align);
9714 return modulus;
9719 /* If we get here, we were unable to determine anything useful about the
9720 expression. */
9721 return 1;
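/* Illustrative sketch, not part of the original source: the pair
   (modulus, residue) encodes P == residue (mod modulus) for a pointer
   value P.  For example, with a 16-byte-aligned base and a stride that
   is a multiple of 4, the two low address bits are known to be zero.
   The names below are hypothetical.  */
#if 0
static char buf[64] __attribute__ ((aligned (16)));
static int low_bits_known (int i)
{
  char *p = buf + 4 * i;                   /* modulus 4, residue 0 */
  return ((__UINTPTR_TYPE__) p & 3) == 0;  /* folds to: 1 */
}
#endif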
9724 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9725 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9727 static bool
9728 vec_cst_ctor_to_array (tree arg, tree *elts)
9730 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9732 if (TREE_CODE (arg) == VECTOR_CST)
9734 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9735 elts[i] = VECTOR_CST_ELT (arg, i);
9737 else if (TREE_CODE (arg) == CONSTRUCTOR)
9739 constructor_elt *elt;
9741 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9742 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9743 return false;
9744 else
9745 elts[i] = elt->value;
9747 else
9748 return false;
9749 for (; i < nelts; i++)
9750 elts[i]
9751 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9752 return true;
9755 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9756 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9757 NULL_TREE otherwise. */
9759 static tree
9760 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9762 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9763 tree *elts;
9764 bool need_ctor = false;
9766 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9767 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9768 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9769 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9770 return NULL_TREE;
9772 elts = XALLOCAVEC (tree, nelts * 3);
9773 if (!vec_cst_ctor_to_array (arg0, elts)
9774 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9775 return NULL_TREE;
9777 for (i = 0; i < nelts; i++)
9779 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9780 need_ctor = true;
9781 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9784 if (need_ctor)
9786 vec<constructor_elt, va_gc> *v;
9787 vec_alloc (v, nelts);
9788 for (i = 0; i < nelts; i++)
9789 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9790 return build_constructor (type, v);
9792 else
9793 return build_vector (type, &elts[2 * nelts]);
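/* Illustrative sketch, not part of the original source: a constant-foldable
   permutation.  Selector elements 0..3 pick from the first vector and
   4..7 from the second.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
static v4si perm (void)
{
  v4si a = { 10, 11, 12, 13 }, b = { 20, 21, 22, 23 };
  v4si sel = { 0, 5, 2, 7 };
  return __builtin_shuffle (a, b, sel); /* folds to: { 10, 21, 12, 23 } */
}
#endif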
9796 /* Try to fold a pointer difference of type TYPE between two address
9797 expressions of array references AREF0 and AREF1 using location LOC. Return a
9798 simplified expression for the difference or NULL_TREE. */
9800 static tree
9801 fold_addr_of_array_ref_difference (location_t loc, tree type,
9802 tree aref0, tree aref1)
9804 tree base0 = TREE_OPERAND (aref0, 0);
9805 tree base1 = TREE_OPERAND (aref1, 0);
9806 tree base_offset = build_int_cst (type, 0);
9808 /* If the bases are array references as well, recurse. If the bases
9809 are pointer indirections compute the difference of the pointers.
9810 If the bases are equal, we are set. */
9811 if ((TREE_CODE (base0) == ARRAY_REF
9812 && TREE_CODE (base1) == ARRAY_REF
9813 && (base_offset
9814 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9815 || (INDIRECT_REF_P (base0)
9816 && INDIRECT_REF_P (base1)
9817 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9818 TREE_OPERAND (base0, 0),
9819 TREE_OPERAND (base1, 0))))
9820 || operand_equal_p (base0, base1, 0))
9822 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9823 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9824 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9825 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9826 return fold_build2_loc (loc, PLUS_EXPR, type,
9827 base_offset,
9828 fold_build2_loc (loc, MULT_EXPR, type,
9829 diff, esz));
9831 return NULL_TREE;
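/* Illustrative sketch, not part of the original source: the difference of
   two addresses of array elements reduces to a difference of indices
   scaled by the element size, which pointer subtraction then undoes.  */
#if 0
extern int a[100];
static __PTRDIFF_TYPE__ d1 (int i, int j)
{
  return &a[i] - &a[j]; /* folds to: i - j */
}
#endif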
9834 /* If the real or vector real constant CST of type TYPE has an exact
9835 inverse, return it, else return NULL. */
9837 static tree
9838 exact_inverse (tree type, tree cst)
9840 REAL_VALUE_TYPE r;
9841 tree unit_type, *elts;
9842 enum machine_mode mode;
9843 unsigned vec_nelts, i;
9845 switch (TREE_CODE (cst))
9847 case REAL_CST:
9848 r = TREE_REAL_CST (cst);
9850 if (exact_real_inverse (TYPE_MODE (type), &r))
9851 return build_real (type, r);
9853 return NULL_TREE;
9855 case VECTOR_CST:
9856 vec_nelts = VECTOR_CST_NELTS (cst);
9857 elts = XALLOCAVEC (tree, vec_nelts);
9858 unit_type = TREE_TYPE (type);
9859 mode = TYPE_MODE (unit_type);
9861 for (i = 0; i < vec_nelts; i++)
9863 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9864 if (!exact_real_inverse (mode, &r))
9865 return NULL_TREE;
9866 elts[i] = build_real (unit_type, r);
9869 return build_vector (type, elts);
9871 default:
9872 return NULL_TREE;
9876 /* Mask out the tz least significant bits of X of type TYPE where
9877 tz is the number of trailing zeroes in Y. */
9878 static double_int
9879 mask_with_tz (tree type, double_int x, double_int y)
9881 int tz = y.trailing_zeros ();
9883 if (tz > 0)
9885 double_int mask;
9887 mask = ~double_int::mask (tz);
9888 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9889 return mask & x;
9891 return x;
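/* Illustrative sketch, not part of the original source: for X * C1 the
   product has at least tz(C1) trailing zero bits, so those bits of a
   mask constant are dead; e.g. mask_with_tz (type, 15, 8) == 8.  Folds
   such as the hypothetical examples below rely on this helper.  */
#if 0
static int w1 (int x) { return (x * 8) & 7; }  /* folds to: 0 */
static int w2 (int x) { return (x * 4) & 15; } /* folds to: (x * 4) & 12 */
#endif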
9894 /* Fold a binary expression of code CODE and type TYPE with operands
9895 OP0 and OP1. LOC is the location of the resulting expression.
9896 Return the folded expression if folding is successful. Otherwise,
9897 return NULL_TREE. */
9899 tree
9900 fold_binary_loc (location_t loc,
9901 enum tree_code code, tree type, tree op0, tree op1)
9903 enum tree_code_class kind = TREE_CODE_CLASS (code);
9904 tree arg0, arg1, tem;
9905 tree t1 = NULL_TREE;
9906 bool strict_overflow_p;
9908 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9909 && TREE_CODE_LENGTH (code) == 2
9910 && op0 != NULL_TREE
9911 && op1 != NULL_TREE);
9913 arg0 = op0;
9914 arg1 = op1;
9916 /* Strip any conversions that don't change the mode. This is
9917 safe for every expression, except for a comparison expression
9918 because its signedness is derived from its operands. So, in
9919 the latter case, only strip conversions that don't change the
9920 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9921 preserved.
9923 Note that this is done as an internal manipulation within the
9924 constant folder, in order to find the simplest representation
9925 of the arguments so that their form can be studied. In any
9926 cases, the appropriate type conversions should be put back in
9927 the tree that will get out of the constant folder. */
9929 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9931 STRIP_SIGN_NOPS (arg0);
9932 STRIP_SIGN_NOPS (arg1);
9934 else
9936 STRIP_NOPS (arg0);
9937 STRIP_NOPS (arg1);
9940 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9941 constant but we can't do arithmetic on them. */
9942 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9943 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9944 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9945 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9946 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9947 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9948 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9950 if (kind == tcc_binary)
9952 /* Make sure type and arg0 have the same saturating flag. */
9953 gcc_assert (TYPE_SATURATING (type)
9954 == TYPE_SATURATING (TREE_TYPE (arg0)));
9955 tem = const_binop (code, arg0, arg1);
9957 else if (kind == tcc_comparison)
9958 tem = fold_relational_const (code, type, arg0, arg1);
9959 else
9960 tem = NULL_TREE;
9962 if (tem != NULL_TREE)
9964 if (TREE_TYPE (tem) != type)
9965 tem = fold_convert_loc (loc, type, tem);
9966 return tem;
9970 /* If this is a commutative operation, and ARG0 is a constant, move it
9971 to ARG1 to reduce the number of tests below. */
9972 if (commutative_tree_code (code)
9973 && tree_swap_operands_p (arg0, arg1, true))
9974 return fold_build2_loc (loc, code, type, op1, op0);
9976 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9978 First check for cases where an arithmetic operation is applied to a
9979 compound, conditional, or comparison operation. Push the arithmetic
9980 operation inside the compound or conditional to see if any folding
9981 can then be done. Convert comparison to conditional for this purpose.
9982 This also optimizes non-constant cases that used to be done in
9983 expand_expr.
9985 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9986 one of the operands is a comparison and the other is a comparison, a
9987 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9988 code below would make the expression more complex. Change it to a
9989 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9990 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9992 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9993 || code == EQ_EXPR || code == NE_EXPR)
9994 && TREE_CODE (type) != VECTOR_TYPE
9995 && ((truth_value_p (TREE_CODE (arg0))
9996 && (truth_value_p (TREE_CODE (arg1))
9997 || (TREE_CODE (arg1) == BIT_AND_EXPR
9998 && integer_onep (TREE_OPERAND (arg1, 1)))))
9999 || (truth_value_p (TREE_CODE (arg1))
10000 && (truth_value_p (TREE_CODE (arg0))
10001 || (TREE_CODE (arg0) == BIT_AND_EXPR
10002 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10004 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10005 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10006 : TRUTH_XOR_EXPR,
10007 boolean_type_node,
10008 fold_convert_loc (loc, boolean_type_node, arg0),
10009 fold_convert_loc (loc, boolean_type_node, arg1));
10011 if (code == EQ_EXPR)
10012 tem = invert_truthvalue_loc (loc, tem);
10014 return fold_convert_loc (loc, type, tem);
10017 if (TREE_CODE_CLASS (code) == tcc_binary
10018 || TREE_CODE_CLASS (code) == tcc_comparison)
10020 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10022 tem = fold_build2_loc (loc, code, type,
10023 fold_convert_loc (loc, TREE_TYPE (op0),
10024 TREE_OPERAND (arg0, 1)), op1);
10025 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10026 tem);
10028 if (TREE_CODE (arg1) == COMPOUND_EXPR
10029 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10031 tem = fold_build2_loc (loc, code, type, op0,
10032 fold_convert_loc (loc, TREE_TYPE (op1),
10033 TREE_OPERAND (arg1, 1)));
10034 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10035 tem);
10038 if (TREE_CODE (arg0) == COND_EXPR
10039 || TREE_CODE (arg0) == VEC_COND_EXPR
10040 || COMPARISON_CLASS_P (arg0))
10042 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10043 arg0, arg1,
10044 /*cond_first_p=*/1);
10045 if (tem != NULL_TREE)
10046 return tem;
10049 if (TREE_CODE (arg1) == COND_EXPR
10050 || TREE_CODE (arg1) == VEC_COND_EXPR
10051 || COMPARISON_CLASS_P (arg1))
10053 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10054 arg1, arg0,
10055 /*cond_first_p=*/0);
10056 if (tem != NULL_TREE)
10057 return tem;
10061 switch (code)
10063 case MEM_REF:
10064 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10065 if (TREE_CODE (arg0) == ADDR_EXPR
10066 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10068 tree iref = TREE_OPERAND (arg0, 0);
10069 return fold_build2 (MEM_REF, type,
10070 TREE_OPERAND (iref, 0),
10071 int_const_binop (PLUS_EXPR, arg1,
10072 TREE_OPERAND (iref, 1)));
10075 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10076 if (TREE_CODE (arg0) == ADDR_EXPR
10077 && handled_component_p (TREE_OPERAND (arg0, 0)))
10079 tree base;
10080 HOST_WIDE_INT coffset;
10081 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10082 &coffset);
10083 if (!base)
10084 return NULL_TREE;
10085 return fold_build2 (MEM_REF, type,
10086 build_fold_addr_expr (base),
10087 int_const_binop (PLUS_EXPR, arg1,
10088 size_int (coffset)));
10091 return NULL_TREE;
10093 case POINTER_PLUS_EXPR:
10094 /* 0 +p index -> (type)index */
10095 if (integer_zerop (arg0))
10096 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10098 /* PTR +p 0 -> PTR */
10099 if (integer_zerop (arg1))
10100 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10102 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10103 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10104 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10105 return fold_convert_loc (loc, type,
10106 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10107 fold_convert_loc (loc, sizetype,
10108 arg1),
10109 fold_convert_loc (loc, sizetype,
10110 arg0)));
10112 /* (PTR +p B) +p A -> PTR +p (B + A) */
10113 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10115 tree inner;
10116 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10117 tree arg00 = TREE_OPERAND (arg0, 0);
10118 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10119 arg01, fold_convert_loc (loc, sizetype, arg1));
10120 return fold_convert_loc (loc, type,
10121 fold_build_pointer_plus_loc (loc,
10122 arg00, inner));
10125 /* PTR_CST +p CST -> CST1 */
10126 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10127 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10128 fold_convert_loc (loc, type, arg1));
10130 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10131 of the array. Loop optimizer sometimes produce this type of
10132 expressions. */
10133 if (TREE_CODE (arg0) == ADDR_EXPR)
10135 tem = try_move_mult_to_index (loc, arg0,
10136 fold_convert_loc (loc,
10137 ssizetype, arg1));
10138 if (tem)
10139 return fold_convert_loc (loc, type, tem);
10142 return NULL_TREE;
10144 case PLUS_EXPR:
10145 /* A + (-B) -> A - B */
10146 if (TREE_CODE (arg1) == NEGATE_EXPR)
10147 return fold_build2_loc (loc, MINUS_EXPR, type,
10148 fold_convert_loc (loc, type, arg0),
10149 fold_convert_loc (loc, type,
10150 TREE_OPERAND (arg1, 0)));
10151 /* (-A) + B -> B - A */
10152 if (TREE_CODE (arg0) == NEGATE_EXPR
10153 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10154 return fold_build2_loc (loc, MINUS_EXPR, type,
10155 fold_convert_loc (loc, type, arg1),
10156 fold_convert_loc (loc, type,
10157 TREE_OPERAND (arg0, 0)));
10159 if (INTEGRAL_TYPE_P (type))
10161 /* Convert ~A + 1 to -A. */
10162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10163 && integer_onep (arg1))
10164 return fold_build1_loc (loc, NEGATE_EXPR, type,
10165 fold_convert_loc (loc, type,
10166 TREE_OPERAND (arg0, 0)));
10168 /* ~X + X is -1. */
10169 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10170 && !TYPE_OVERFLOW_TRAPS (type))
10172 tree tem = TREE_OPERAND (arg0, 0);
10174 STRIP_NOPS (tem);
10175 if (operand_equal_p (tem, arg1, 0))
10177 t1 = build_int_cst_type (type, -1);
10178 return omit_one_operand_loc (loc, type, t1, arg1);
10182 /* X + ~X is -1. */
10183 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10184 && !TYPE_OVERFLOW_TRAPS (type))
10186 tree tem = TREE_OPERAND (arg1, 0);
10188 STRIP_NOPS (tem);
10189 if (operand_equal_p (arg0, tem, 0))
10191 t1 = build_int_cst_type (type, -1);
10192 return omit_one_operand_loc (loc, type, t1, arg0);
10196 /* X + (X / CST) * -CST is X % CST. */
10197 if (TREE_CODE (arg1) == MULT_EXPR
10198 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10199 && operand_equal_p (arg0,
10200 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10202 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10203 tree cst1 = TREE_OPERAND (arg1, 1);
10204 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10205 cst1, cst0);
10206 if (sum && integer_zerop (sum))
10207 return fold_convert_loc (loc, type,
10208 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10209 TREE_TYPE (arg0), arg0,
10210 cst0));
10214 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10215 one. Make sure the type is not saturating and has the signedness of
10216 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10217 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10218 if ((TREE_CODE (arg0) == MULT_EXPR
10219 || TREE_CODE (arg1) == MULT_EXPR)
10220 && !TYPE_SATURATING (type)
10221 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10222 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10223 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10225 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10226 if (tem)
10227 return tem;
10230 if (! FLOAT_TYPE_P (type))
10232 if (integer_zerop (arg1))
10233 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10235 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10236 with a constant, and the two constants have no bits in common,
10237 we should treat this as a BIT_IOR_EXPR since this may produce more
10238 simplifications. */
10239 if (TREE_CODE (arg0) == BIT_AND_EXPR
10240 && TREE_CODE (arg1) == BIT_AND_EXPR
10241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10242 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10243 && integer_zerop (const_binop (BIT_AND_EXPR,
10244 TREE_OPERAND (arg0, 1),
10245 TREE_OPERAND (arg1, 1))))
10247 code = BIT_IOR_EXPR;
10248 goto bit_ior;
10251 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10252 (plus (plus (mult) (mult)) (foo)) so that we can
10253 take advantage of the factoring cases below. */
10254 if (TYPE_OVERFLOW_WRAPS (type)
10255 && (((TREE_CODE (arg0) == PLUS_EXPR
10256 || TREE_CODE (arg0) == MINUS_EXPR)
10257 && TREE_CODE (arg1) == MULT_EXPR)
10258 || ((TREE_CODE (arg1) == PLUS_EXPR
10259 || TREE_CODE (arg1) == MINUS_EXPR)
10260 && TREE_CODE (arg0) == MULT_EXPR)))
10262 tree parg0, parg1, parg, marg;
10263 enum tree_code pcode;
10265 if (TREE_CODE (arg1) == MULT_EXPR)
10266 parg = arg0, marg = arg1;
10267 else
10268 parg = arg1, marg = arg0;
10269 pcode = TREE_CODE (parg);
10270 parg0 = TREE_OPERAND (parg, 0);
10271 parg1 = TREE_OPERAND (parg, 1);
10272 STRIP_NOPS (parg0);
10273 STRIP_NOPS (parg1);
10275 if (TREE_CODE (parg0) == MULT_EXPR
10276 && TREE_CODE (parg1) != MULT_EXPR)
10277 return fold_build2_loc (loc, pcode, type,
10278 fold_build2_loc (loc, PLUS_EXPR, type,
10279 fold_convert_loc (loc, type,
10280 parg0),
10281 fold_convert_loc (loc, type,
10282 marg)),
10283 fold_convert_loc (loc, type, parg1));
10284 if (TREE_CODE (parg0) != MULT_EXPR
10285 && TREE_CODE (parg1) == MULT_EXPR)
10286 return
10287 fold_build2_loc (loc, PLUS_EXPR, type,
10288 fold_convert_loc (loc, type, parg0),
10289 fold_build2_loc (loc, pcode, type,
10290 fold_convert_loc (loc, type, marg),
10291 fold_convert_loc (loc, type,
10292 parg1)));
10295 else
10297 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10298 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10299 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10301 /* Likewise if the operands are reversed. */
10302 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10303 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10305 /* Convert X + -C into X - C. */
10306 if (TREE_CODE (arg1) == REAL_CST
10307 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10309 tem = fold_negate_const (arg1, type);
10310 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10311 return fold_build2_loc (loc, MINUS_EXPR, type,
10312 fold_convert_loc (loc, type, arg0),
10313 fold_convert_loc (loc, type, tem));
10316 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10317 to __complex__ ( x, y ). This is not the same for SNaNs or
10318 if signed zeros are involved. */
10319 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10320 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10321 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10323 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10324 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10325 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10326 bool arg0rz = false, arg0iz = false;
10327 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10328 || (arg0i && (arg0iz = real_zerop (arg0i))))
10330 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10331 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10332 if (arg0rz && arg1i && real_zerop (arg1i))
10334 tree rp = arg1r ? arg1r
10335 : build1 (REALPART_EXPR, rtype, arg1);
10336 tree ip = arg0i ? arg0i
10337 : build1 (IMAGPART_EXPR, rtype, arg0);
10338 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10340 else if (arg0iz && arg1r && real_zerop (arg1r))
10342 tree rp = arg0r ? arg0r
10343 : build1 (REALPART_EXPR, rtype, arg0);
10344 tree ip = arg1i ? arg1i
10345 : build1 (IMAGPART_EXPR, rtype, arg1);
10346 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10351 if (flag_unsafe_math_optimizations
10352 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10353 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10354 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10355 return tem;
10357 /* Convert x+x into x*2.0. */
10358 if (operand_equal_p (arg0, arg1, 0)
10359 && SCALAR_FLOAT_TYPE_P (type))
10360 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10361 build_real (type, dconst2));
10363 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10364 We associate floats only if the user has specified
10365 -fassociative-math. */
10366 if (flag_associative_math
10367 && TREE_CODE (arg1) == PLUS_EXPR
10368 && TREE_CODE (arg0) != MULT_EXPR)
10370 tree tree10 = TREE_OPERAND (arg1, 0);
10371 tree tree11 = TREE_OPERAND (arg1, 1);
10372 if (TREE_CODE (tree11) == MULT_EXPR
10373 && TREE_CODE (tree10) == MULT_EXPR)
10375 tree tree0;
10376 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10377 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10380 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10381 We associate floats only if the user has specified
10382 -fassociative-math. */
10383 if (flag_associative_math
10384 && TREE_CODE (arg0) == PLUS_EXPR
10385 && TREE_CODE (arg1) != MULT_EXPR)
10387 tree tree00 = TREE_OPERAND (arg0, 0);
10388 tree tree01 = TREE_OPERAND (arg0, 1);
10389 if (TREE_CODE (tree01) == MULT_EXPR
10390 && TREE_CODE (tree00) == MULT_EXPR)
10392 tree tree0;
10393 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10394 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10399 bit_rotate:
10400 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10401 is a rotate of A by C1 bits. */
10402 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10403 is a rotate of A by B bits. */
10405 enum tree_code code0, code1;
10406 tree rtype;
10407 code0 = TREE_CODE (arg0);
10408 code1 = TREE_CODE (arg1);
10409 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10410 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10411 && operand_equal_p (TREE_OPERAND (arg0, 0),
10412 TREE_OPERAND (arg1, 0), 0)
10413 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10414 TYPE_UNSIGNED (rtype))
10415 /* Only create rotates in complete modes. Other cases are not
10416 expanded properly. */
10417 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10419 tree tree01, tree11;
10420 enum tree_code code01, code11;
10422 tree01 = TREE_OPERAND (arg0, 1);
10423 tree11 = TREE_OPERAND (arg1, 1);
10424 STRIP_NOPS (tree01);
10425 STRIP_NOPS (tree11);
10426 code01 = TREE_CODE (tree01);
10427 code11 = TREE_CODE (tree11);
10428 if (code01 == INTEGER_CST
10429 && code11 == INTEGER_CST
10430 && TREE_INT_CST_HIGH (tree01) == 0
10431 && TREE_INT_CST_HIGH (tree11) == 0
10432 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10433 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10435 tem = build2_loc (loc, LROTATE_EXPR,
10436 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10437 TREE_OPERAND (arg0, 0),
10438 code0 == LSHIFT_EXPR ? tree01 : tree11);
10439 return fold_convert_loc (loc, type, tem);
10441 else if (code11 == MINUS_EXPR)
10443 tree tree110, tree111;
10444 tree110 = TREE_OPERAND (tree11, 0);
10445 tree111 = TREE_OPERAND (tree11, 1);
10446 STRIP_NOPS (tree110);
10447 STRIP_NOPS (tree111);
10448 if (TREE_CODE (tree110) == INTEGER_CST
10449 && 0 == compare_tree_int (tree110,
10450 TYPE_PRECISION
10451 (TREE_TYPE (TREE_OPERAND
10452 (arg0, 0))))
10453 && operand_equal_p (tree01, tree111, 0))
10454 return
10455 fold_convert_loc (loc, type,
10456 build2 ((code0 == LSHIFT_EXPR
10457 ? LROTATE_EXPR
10458 : RROTATE_EXPR),
10459 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10460 TREE_OPERAND (arg0, 0), tree01));
10462 else if (code01 == MINUS_EXPR)
10464 tree tree010, tree011;
10465 tree010 = TREE_OPERAND (tree01, 0);
10466 tree011 = TREE_OPERAND (tree01, 1);
10467 STRIP_NOPS (tree010);
10468 STRIP_NOPS (tree011);
10469 if (TREE_CODE (tree010) == INTEGER_CST
10470 && 0 == compare_tree_int (tree010,
10471 TYPE_PRECISION
10472 (TREE_TYPE (TREE_OPERAND
10473 (arg0, 0))))
10474 && operand_equal_p (tree11, tree011, 0))
10475 return fold_convert_loc
10476 (loc, type,
10477 build2 ((code0 != LSHIFT_EXPR
10478 ? LROTATE_EXPR
10479 : RROTATE_EXPR),
10480 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10481 TREE_OPERAND (arg0, 0), tree11));
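/* Illustrative sketch, not part of the original source: the rotate idioms
   matched above, assuming 32-bit unsigned int and, in the variable case,
   0 < b < 32 so the shift counts are well defined.  Both fold to a
   single LROTATE_EXPR.  */
#if 0
static unsigned r1 (unsigned a)        { return (a << 8) + (a >> 24); }
static unsigned r2 (unsigned a, int b) { return (a << b) + (a >> (32 - b)); }
#endif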
10486 associate:
10487 /* In most languages, we can't associate operations on floats through
10488 parentheses. Rather than remember where the parentheses were, we
10489 don't associate floats at all, unless the user has specified
10490 -fassociative-math.
10491 And, we need to make sure type is not saturating. */
10493 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10494 && !TYPE_SATURATING (type))
10496 tree var0, con0, lit0, minus_lit0;
10497 tree var1, con1, lit1, minus_lit1;
10498 tree atype = type;
10499 bool ok = true;
10501 /* Split both trees into variables, constants, and literals. Then
10502 associate each group together, the constants with literals,
10503 then the result with variables. This increases the chances of
10504 literals being recombined later and of generating relocatable
10505 expressions for the sum of a constant and literal. */
10506 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10507 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10508 code == MINUS_EXPR);
10510 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10511 if (code == MINUS_EXPR)
10512 code = PLUS_EXPR;
10514 /* With undefined overflow prefer doing association in a type
10515 which wraps on overflow, if that is one of the operand types. */
10516 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10517 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10519 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10520 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10521 atype = TREE_TYPE (arg0);
10522 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10523 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10524 atype = TREE_TYPE (arg1);
10525 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10528 /* With undefined overflow we can only associate constants with one
10529 variable, and constants whose association doesn't overflow. */
10530 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10531 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10533 if (var0 && var1)
10535 tree tmp0 = var0;
10536 tree tmp1 = var1;
10538 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10539 tmp0 = TREE_OPERAND (tmp0, 0);
10540 if (CONVERT_EXPR_P (tmp0)
10541 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10542 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10543 <= TYPE_PRECISION (atype)))
10544 tmp0 = TREE_OPERAND (tmp0, 0);
10545 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10546 tmp1 = TREE_OPERAND (tmp1, 0);
10547 if (CONVERT_EXPR_P (tmp1)
10548 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10549 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10550 <= TYPE_PRECISION (atype)))
10551 tmp1 = TREE_OPERAND (tmp1, 0);
10552 /* The only case we can still associate with two variables
10553 is if they are the same, modulo negation and bit-pattern
10554 preserving conversions. */
10555 if (!operand_equal_p (tmp0, tmp1, 0))
10556 ok = false;
10560 /* Only do something if we found more than two objects. Otherwise,
10561 nothing has changed and we risk infinite recursion. */
10562 if (ok
10563 && (2 < ((var0 != 0) + (var1 != 0)
10564 + (con0 != 0) + (con1 != 0)
10565 + (lit0 != 0) + (lit1 != 0)
10566 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10568 bool any_overflows = false;
10569 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10570 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10571 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10572 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10573 var0 = associate_trees (loc, var0, var1, code, atype);
10574 con0 = associate_trees (loc, con0, con1, code, atype);
10575 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10576 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10577 code, atype);
10579 /* Preserve the MINUS_EXPR if the negative part of the literal is
10580 greater than the positive part. Otherwise, the multiplicative
10581 folding code (i.e. extract_muldiv) may be fooled in case
10582 unsigned constants are subtracted, like in the following
10583 example: ((X*2 + 4) - 8U)/2. */
10584 if (minus_lit0 && lit0)
10586 if (TREE_CODE (lit0) == INTEGER_CST
10587 && TREE_CODE (minus_lit0) == INTEGER_CST
10588 && tree_int_cst_lt (lit0, minus_lit0))
10590 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10591 MINUS_EXPR, atype);
10592 lit0 = 0;
10594 else
10596 lit0 = associate_trees (loc, lit0, minus_lit0,
10597 MINUS_EXPR, atype);
10598 minus_lit0 = 0;
10602 /* Don't introduce overflows through reassociation. */
10603 if (!any_overflows
10604 && ((lit0 && TREE_OVERFLOW (lit0))
10605 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10606 return NULL_TREE;
10608 if (minus_lit0)
10610 if (con0 == 0)
10611 return
10612 fold_convert_loc (loc, type,
10613 associate_trees (loc, var0, minus_lit0,
10614 MINUS_EXPR, atype));
10615 else
10617 con0 = associate_trees (loc, con0, minus_lit0,
10618 MINUS_EXPR, atype);
10619 return
10620 fold_convert_loc (loc, type,
10621 associate_trees (loc, var0, con0,
10622 PLUS_EXPR, atype));
10626 con0 = associate_trees (loc, con0, lit0, code, atype);
10627 return
10628 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10629 code, atype));
10633 return NULL_TREE;
10635 case MINUS_EXPR:
10636 /* Pointer simplifications for subtraction, simple reassociations. */
10637 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10639 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10640 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10641 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10643 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10644 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10645 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10646 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10647 return fold_build2_loc (loc, PLUS_EXPR, type,
10648 fold_build2_loc (loc, MINUS_EXPR, type,
10649 arg00, arg10),
10650 fold_build2_loc (loc, MINUS_EXPR, type,
10651 arg01, arg11));
10653 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10654 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10656 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10657 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10658 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10659 fold_convert_loc (loc, type, arg1));
10660 if (tmp)
10661 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10664 /* A - (-B) -> A + B */
10665 if (TREE_CODE (arg1) == NEGATE_EXPR)
10666 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10667 fold_convert_loc (loc, type,
10668 TREE_OPERAND (arg1, 0)));
10669 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10670 if (TREE_CODE (arg0) == NEGATE_EXPR
10671 && (FLOAT_TYPE_P (type)
10672 || INTEGRAL_TYPE_P (type))
10673 && negate_expr_p (arg1)
10674 && reorder_operands_p (arg0, arg1))
10675 return fold_build2_loc (loc, MINUS_EXPR, type,
10676 fold_convert_loc (loc, type,
10677 negate_expr (arg1)),
10678 fold_convert_loc (loc, type,
10679 TREE_OPERAND (arg0, 0)));
10680 /* Convert -A - 1 to ~A. */
10681 if (INTEGRAL_TYPE_P (type)
10682 && TREE_CODE (arg0) == NEGATE_EXPR
10683 && integer_onep (arg1)
10684 && !TYPE_OVERFLOW_TRAPS (type))
10685 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10686 fold_convert_loc (loc, type,
10687 TREE_OPERAND (arg0, 0)));
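/* Added note (illustrative): in two's complement ~A == -A - 1, so
   e.g. A = 5 gives -5 - 1 == -6 == ~5. The fold is skipped when
   TYPE_OVERFLOW_TRAPS, since -A may trap where ~A would not. */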
10689 /* Convert -1 - A to ~A. */
10690 if (INTEGRAL_TYPE_P (type)
10691 && integer_all_onesp (arg0))
10692 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10695 /* X - (X / CST) * CST is X % CST. */
10696 if (INTEGRAL_TYPE_P (type)
10697 && TREE_CODE (arg1) == MULT_EXPR
10698 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10699 && operand_equal_p (arg0,
10700 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10701 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10702 TREE_OPERAND (arg1, 1), 0))
10703 return
10704 fold_convert_loc (loc, type,
10705 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10706 arg0, TREE_OPERAND (arg1, 1)));
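/* Worked example (added illustration): X = 7, CST = 3 gives
   7 - (7 / 3) * 3 == 7 - 6 == 1 == 7 % 3; with truncating division
   the identity also holds for negative X. */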
10708 if (! FLOAT_TYPE_P (type))
10710 if (integer_zerop (arg0))
10711 return negate_expr (fold_convert_loc (loc, type, arg1));
10712 if (integer_zerop (arg1))
10713 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10715 /* Fold A - (A & B) into ~B & A. */
10716 if (!TREE_SIDE_EFFECTS (arg0)
10717 && TREE_CODE (arg1) == BIT_AND_EXPR)
10719 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10721 tree arg10 = fold_convert_loc (loc, type,
10722 TREE_OPERAND (arg1, 0));
10723 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10724 fold_build1_loc (loc, BIT_NOT_EXPR,
10725 type, arg10),
10726 fold_convert_loc (loc, type, arg0));
10728 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10730 tree arg11 = fold_convert_loc (loc,
10731 type, TREE_OPERAND (arg1, 1));
10732 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10733 fold_build1_loc (loc, BIT_NOT_EXPR,
10734 type, arg11),
10735 fold_convert_loc (loc, type, arg0));
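/* Worked example (added illustration): A = 0b1100, B = 0b1010 gives
   A - (A & B) == 12 - 8 == 4 == ~B & A; the bits of A & B are a
   subset of A's bits, so the subtraction cannot borrow. */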
10739 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10740 any power of 2 minus 1. */
10741 if (TREE_CODE (arg0) == BIT_AND_EXPR
10742 && TREE_CODE (arg1) == BIT_AND_EXPR
10743 && operand_equal_p (TREE_OPERAND (arg0, 0),
10744 TREE_OPERAND (arg1, 0), 0))
10746 tree mask0 = TREE_OPERAND (arg0, 1);
10747 tree mask1 = TREE_OPERAND (arg1, 1);
10748 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10750 if (operand_equal_p (tem, mask1, 0))
10752 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10753 TREE_OPERAND (arg0, 0), mask1);
10754 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
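/* Worked example (added illustration): B = 7 (a power of 2 minus 1),
   A = 22 (0b10110): (A & ~B) - (A & B) == 16 - 6 == 10, and
   (A ^ B) - B == 17 - 7 == 10; within the mask, A ^ B equals
   B - (A & B), so the two forms agree. */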
10759 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10760 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10761 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10763 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10764 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10765 (-ARG1 + ARG0) reduces to -ARG1. */
10766 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10767 return negate_expr (fold_convert_loc (loc, type, arg1));
10769 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10770 __complex__ ( x, -y ). This is not the same for SNaNs or if
10771 signed zeros are involved. */
10772 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10773 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10774 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10776 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10777 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10778 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10779 bool arg0rz = false, arg0iz = false;
10780 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10781 || (arg0i && (arg0iz = real_zerop (arg0i))))
10783 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10784 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10785 if (arg0rz && arg1i && real_zerop (arg1i))
10787 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10788 arg1r ? arg1r
10789 : build1 (REALPART_EXPR, rtype, arg1));
10790 tree ip = arg0i ? arg0i
10791 : build1 (IMAGPART_EXPR, rtype, arg0);
10792 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10794 else if (arg0iz && arg1r && real_zerop (arg1r))
10796 tree rp = arg0r ? arg0r
10797 : build1 (REALPART_EXPR, rtype, arg0);
10798 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10799 arg1i ? arg1i
10800 : build1 (IMAGPART_EXPR, rtype, arg1));
10801 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10806 /* Fold &x - &x. This can happen from &x.foo - &x.
10807 This is unsafe for certain floats even in non-IEEE formats.
10808 In IEEE, it is unsafe because it does wrong for NaNs.
10809 Also note that operand_equal_p is always false if an operand
10810 is volatile. */
10812 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10813 && operand_equal_p (arg0, arg1, 0))
10814 return build_zero_cst (type);
10816 /* A - B -> A + (-B) if B is easily negatable. */
10817 if (negate_expr_p (arg1)
10818 && ((FLOAT_TYPE_P (type)
10819 /* Avoid this transformation if B is a positive REAL_CST. */
10820 && (TREE_CODE (arg1) != REAL_CST
10821 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10822 || INTEGRAL_TYPE_P (type)))
10823 return fold_build2_loc (loc, PLUS_EXPR, type,
10824 fold_convert_loc (loc, type, arg0),
10825 fold_convert_loc (loc, type,
10826 negate_expr (arg1)));
10828 /* Try folding difference of addresses. */
10830 HOST_WIDE_INT diff;
10832 if ((TREE_CODE (arg0) == ADDR_EXPR
10833 || TREE_CODE (arg1) == ADDR_EXPR)
10834 && ptr_difference_const (arg0, arg1, &diff))
10835 return build_int_cst_type (type, diff);
10838 /* Fold &a[i] - &a[j] to i-j. */
10839 if (TREE_CODE (arg0) == ADDR_EXPR
10840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10841 && TREE_CODE (arg1) == ADDR_EXPR
10842 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10844 tree tem = fold_addr_of_array_ref_difference (loc, type,
10845 TREE_OPERAND (arg0, 0),
10846 TREE_OPERAND (arg1, 0));
10847 if (tem)
10848 return tem;
10851 if (FLOAT_TYPE_P (type)
10852 && flag_unsafe_math_optimizations
10853 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10854 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10855 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10856 return tem;
10858 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10859 one. Make sure the type is not saturating and has the signedness of
10860 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10861 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10862 if ((TREE_CODE (arg0) == MULT_EXPR
10863 || TREE_CODE (arg1) == MULT_EXPR)
10864 && !TYPE_SATURATING (type)
10865 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10866 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10867 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10869 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10870 if (tem)
10871 return tem;
10874 goto associate;
10876 case MULT_EXPR:
10877 /* (-A) * (-B) -> A * B */
10878 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10879 return fold_build2_loc (loc, MULT_EXPR, type,
10880 fold_convert_loc (loc, type,
10881 TREE_OPERAND (arg0, 0)),
10882 fold_convert_loc (loc, type,
10883 negate_expr (arg1)));
10884 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10885 return fold_build2_loc (loc, MULT_EXPR, type,
10886 fold_convert_loc (loc, type,
10887 negate_expr (arg0)),
10888 fold_convert_loc (loc, type,
10889 TREE_OPERAND (arg1, 0)));
10891 if (! FLOAT_TYPE_P (type))
10893 if (integer_zerop (arg1))
10894 return omit_one_operand_loc (loc, type, arg1, arg0);
10895 if (integer_onep (arg1))
10896 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10897 /* Transform x * -1 into -x. Make sure to do the negation
10898 on the original operand with conversions not stripped
10899 because we can only strip non-sign-changing conversions. */
10900 if (integer_all_onesp (arg1))
10901 return fold_convert_loc (loc, type, negate_expr (op0));
10902 /* Transform x * -C into -x * C if x is easily negatable. */
10903 if (TREE_CODE (arg1) == INTEGER_CST
10904 && tree_int_cst_sgn (arg1) == -1
10905 && negate_expr_p (arg0)
10906 && (tem = negate_expr (arg1)) != arg1
10907 && !TREE_OVERFLOW (tem))
10908 return fold_build2_loc (loc, MULT_EXPR, type,
10909 fold_convert_loc (loc, type,
10910 negate_expr (arg0)),
10911 tem);
10913 /* (a * (1 << b)) is (a << b) */
10914 if (TREE_CODE (arg1) == LSHIFT_EXPR
10915 && integer_onep (TREE_OPERAND (arg1, 0)))
10916 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10917 TREE_OPERAND (arg1, 1));
10918 if (TREE_CODE (arg0) == LSHIFT_EXPR
10919 && integer_onep (TREE_OPERAND (arg0, 0)))
10920 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10921 TREE_OPERAND (arg0, 1));
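/* Worked example (added illustration): a * (1 << 3) == a * 8 ==
   a << 3, so multiplication by a power-of-2 shift folds to a plain
   shift of the other operand. */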
10923 /* (A + A) * C -> A * 2 * C */
10924 if (TREE_CODE (arg0) == PLUS_EXPR
10925 && TREE_CODE (arg1) == INTEGER_CST
10926 && operand_equal_p (TREE_OPERAND (arg0, 0),
10927 TREE_OPERAND (arg0, 1), 0))
10928 return fold_build2_loc (loc, MULT_EXPR, type,
10929 omit_one_operand_loc (loc, type,
10930 TREE_OPERAND (arg0, 0),
10931 TREE_OPERAND (arg0, 1)),
10932 fold_build2_loc (loc, MULT_EXPR, type,
10933 build_int_cst (type, 2), arg1));
10935 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10936 sign-changing only. */
10937 if (TREE_CODE (arg1) == INTEGER_CST
10938 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10939 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10940 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10942 strict_overflow_p = false;
10943 if (TREE_CODE (arg1) == INTEGER_CST
10944 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10945 &strict_overflow_p)))
10947 if (strict_overflow_p)
10948 fold_overflow_warning (("assuming signed overflow does not "
10949 "occur when simplifying "
10950 "multiplication"),
10951 WARN_STRICT_OVERFLOW_MISC);
10952 return fold_convert_loc (loc, type, tem);
10955 /* Optimize z * conj(z) for integer complex numbers. */
10956 if (TREE_CODE (arg0) == CONJ_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10958 return fold_mult_zconjz (loc, type, arg1);
10959 if (TREE_CODE (arg1) == CONJ_EXPR
10960 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10961 return fold_mult_zconjz (loc, type, arg0);
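/* Added note (illustrative): for z = a + bi, z * conj(z) ==
   (a + bi)(a - bi) == a*a + b*b, a purely real result, which is
   the form fold_mult_zconjz constructs. */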
10963 else
10965 /* Maybe fold x * 0 to 0. The expressions aren't the same
10966 when x is NaN, since x * 0 is also NaN. Nor are they the
10967 same in modes with signed zeros, since multiplying a
10968 negative value by 0 gives -0, not +0. */
10969 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10970 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10971 && real_zerop (arg1))
10972 return omit_one_operand_loc (loc, type, arg1, arg0);
10973 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10974 Likewise for complex arithmetic with signed zeros. */
10975 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10976 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10977 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10978 && real_onep (arg1))
10979 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10981 /* Transform x * -1.0 into -x. */
10982 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10983 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10984 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10985 && real_minus_onep (arg1))
10986 return fold_convert_loc (loc, type, negate_expr (arg0));
10988 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10989 the result for floating point types due to rounding so it is applied
10990 only if -fassociative-math is specified. */
10991 if (flag_associative_math
10992 && TREE_CODE (arg0) == RDIV_EXPR
10993 && TREE_CODE (arg1) == REAL_CST
10994 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10996 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10997 arg1);
10998 if (tem)
10999 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11000 TREE_OPERAND (arg0, 1));
11003 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11004 if (operand_equal_p (arg0, arg1, 0))
11006 tree tem = fold_strip_sign_ops (arg0);
11007 if (tem != NULL_TREE)
11009 tem = fold_convert_loc (loc, type, tem);
11010 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11014 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11015 This is not the same for NaNs or if signed zeros are
11016 involved. */
11017 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11018 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11019 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11020 && TREE_CODE (arg1) == COMPLEX_CST
11021 && real_zerop (TREE_REALPART (arg1)))
11023 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11024 if (real_onep (TREE_IMAGPART (arg1)))
11025 return
11026 fold_build2_loc (loc, COMPLEX_EXPR, type,
11027 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11028 rtype, arg0)),
11029 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11030 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11031 return
11032 fold_build2_loc (loc, COMPLEX_EXPR, type,
11033 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11034 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11035 rtype, arg0)));
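/* Worked example (added illustration): for z = a + bi,
   z * I == -b + ai == __complex__ (-__imag z, __real z), and
   z * -I == b - ai == __complex__ (__imag z, -__real z). */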
11038 /* Optimize z * conj(z) for floating point complex numbers.
11039 Guarded by flag_unsafe_math_optimizations as non-finite
11040 imaginary components don't produce scalar results. */
11041 if (flag_unsafe_math_optimizations
11042 && TREE_CODE (arg0) == CONJ_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11044 return fold_mult_zconjz (loc, type, arg1);
11045 if (flag_unsafe_math_optimizations
11046 && TREE_CODE (arg1) == CONJ_EXPR
11047 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11048 return fold_mult_zconjz (loc, type, arg0);
11050 if (flag_unsafe_math_optimizations)
11052 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11053 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11055 /* Optimizations of root(...)*root(...). */
11056 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11058 tree rootfn, arg;
11059 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11060 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11062 /* Optimize sqrt(x)*sqrt(x) as x. */
11063 if (BUILTIN_SQRT_P (fcode0)
11064 && operand_equal_p (arg00, arg10, 0)
11065 && ! HONOR_SNANS (TYPE_MODE (type)))
11066 return arg00;
11068 /* Optimize root(x)*root(y) as root(x*y). */
11069 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11070 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11071 return build_call_expr_loc (loc, rootfn, 1, arg);
11074 /* Optimize expN(x)*expN(y) as expN(x+y). */
11075 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11077 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11078 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11079 CALL_EXPR_ARG (arg0, 0),
11080 CALL_EXPR_ARG (arg1, 0));
11081 return build_call_expr_loc (loc, expfn, 1, arg);
11084 /* Optimizations of pow(...)*pow(...). */
11085 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11086 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11087 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11089 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11090 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11091 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11092 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11094 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11095 if (operand_equal_p (arg01, arg11, 0))
11097 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11098 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11099 arg00, arg10);
11100 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11103 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11104 if (operand_equal_p (arg00, arg10, 0))
11106 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11107 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11108 arg01, arg11);
11109 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
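/* Worked examples (added illustration, valid under the unsafe-math
   guard above): pow (2.0, y) * pow (3.0, y) folds to pow (6.0, y),
   and pow (x, 2.0) * pow (x, 3.0) folds to pow (x, 5.0). */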
11113 /* Optimize tan(x)*cos(x) as sin(x). */
11114 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11115 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11116 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11117 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11118 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11119 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11120 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11121 CALL_EXPR_ARG (arg1, 0), 0))
11123 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11125 if (sinfn != NULL_TREE)
11126 return build_call_expr_loc (loc, sinfn, 1,
11127 CALL_EXPR_ARG (arg0, 0));
11130 /* Optimize x*pow(x,c) as pow(x,c+1). */
11131 if (fcode1 == BUILT_IN_POW
11132 || fcode1 == BUILT_IN_POWF
11133 || fcode1 == BUILT_IN_POWL)
11135 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11136 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11137 if (TREE_CODE (arg11) == REAL_CST
11138 && !TREE_OVERFLOW (arg11)
11139 && operand_equal_p (arg0, arg10, 0))
11141 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11142 REAL_VALUE_TYPE c;
11143 tree arg;
11145 c = TREE_REAL_CST (arg11);
11146 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11147 arg = build_real (type, c);
11148 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11152 /* Optimize pow(x,c)*x as pow(x,c+1). */
11153 if (fcode0 == BUILT_IN_POW
11154 || fcode0 == BUILT_IN_POWF
11155 || fcode0 == BUILT_IN_POWL)
11157 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11158 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11159 if (TREE_CODE (arg01) == REAL_CST
11160 && !TREE_OVERFLOW (arg01)
11161 && operand_equal_p (arg1, arg00, 0))
11163 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11164 REAL_VALUE_TYPE c;
11165 tree arg;
11167 c = TREE_REAL_CST (arg01);
11168 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11169 arg = build_real (type, c);
11170 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11174 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11175 if (!in_gimple_form
11176 && optimize
11177 && operand_equal_p (arg0, arg1, 0))
11179 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11181 if (powfn)
11183 tree arg = build_real (type, dconst2);
11184 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11189 goto associate;
11191 case BIT_IOR_EXPR:
11192 bit_ior:
11193 if (integer_all_onesp (arg1))
11194 return omit_one_operand_loc (loc, type, arg1, arg0);
11195 if (integer_zerop (arg1))
11196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11197 if (operand_equal_p (arg0, arg1, 0))
11198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11200 /* ~X | X is -1. */
11201 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11204 t1 = build_zero_cst (type);
11205 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11206 return omit_one_operand_loc (loc, type, t1, arg1);
11209 /* X | ~X is -1. */
11210 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11213 t1 = build_zero_cst (type);
11214 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11215 return omit_one_operand_loc (loc, type, t1, arg0);
11218 /* Canonicalize (X & C1) | C2. */
11219 if (TREE_CODE (arg0) == BIT_AND_EXPR
11220 && TREE_CODE (arg1) == INTEGER_CST
11221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11223 double_int c1, c2, c3, msk;
11224 int width = TYPE_PRECISION (type), w;
11226 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11227 c2 = tree_to_double_int (arg1);
11229 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11230 if ((c1 & c2) == c1)
11231 return omit_one_operand_loc (loc, type, arg1,
11232 TREE_OPERAND (arg0, 0));
11234 msk = double_int::mask (width);
11236 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11237 if (msk.and_not (c1 | c2).is_zero ())
11238 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11239 TREE_OPERAND (arg0, 0), arg1);
11241 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11242 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11243 mode which allows further optimizations. */
11244 c1 &= msk;
11245 c2 &= msk;
11246 c3 = c1.and_not (c2);
11247 for (w = BITS_PER_UNIT;
11248 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11249 w <<= 1)
11251 unsigned HOST_WIDE_INT mask
11252 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11253 if (((c1.low | c2.low) & mask) == mask
11254 && (c1.low & ~mask) == 0 && c1.high == 0)
11256 c3 = double_int::from_uhwi (mask);
11257 break;
11261 if (c3 != c1)
11262 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11263 fold_build2_loc (loc, BIT_AND_EXPR, type,
11264 TREE_OPERAND (arg0, 0),
11265 double_int_to_tree (type,
11266 c3)),
11267 arg1);
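/* Worked example (added illustration): (X & 0x3C) | 0x0F becomes
   (X & 0x30) | 0x0F, since C1 & ~C2 == 0x30 and 0x30 is not itself
   a mode mask; the bits of C1 already forced to 1 by C2 are
   redundant in the AND. */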
11270 /* (X & Y) | Y is (X, Y). */
11271 if (TREE_CODE (arg0) == BIT_AND_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11273 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11274 /* (X & Y) | X is (Y, X). */
11275 if (TREE_CODE (arg0) == BIT_AND_EXPR
11276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11277 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11278 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11279 /* X | (X & Y) is (Y, X). */
11280 if (TREE_CODE (arg1) == BIT_AND_EXPR
11281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11282 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11283 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11284 /* X | (Y & X) is (Y, X). */
11285 if (TREE_CODE (arg1) == BIT_AND_EXPR
11286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11287 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11288 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11290 /* (X & ~Y) | (~X & Y) is X ^ Y */
11291 if (TREE_CODE (arg0) == BIT_AND_EXPR
11292 && TREE_CODE (arg1) == BIT_AND_EXPR)
11294 tree a0, a1, l0, l1, n0, n1;
11296 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11297 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11299 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11300 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11302 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11303 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11305 if ((operand_equal_p (n0, a0, 0)
11306 && operand_equal_p (n1, a1, 0))
11307 || (operand_equal_p (n0, a1, 0)
11308 && operand_equal_p (n1, a0, 0)))
11309 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11312 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11313 if (t1 != NULL_TREE)
11314 return t1;
11316 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11318 This results in more efficient code for machines without a NAND
11319 instruction. Combine will canonicalize to the first form
11320 which will allow use of NAND instructions provided by the
11321 backend if they exist. */
11322 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11323 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11325 return
11326 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11327 build2 (BIT_AND_EXPR, type,
11328 fold_convert_loc (loc, type,
11329 TREE_OPERAND (arg0, 0)),
11330 fold_convert_loc (loc, type,
11331 TREE_OPERAND (arg1, 0))));
11334 /* See if this can be simplified into a rotate first. If that
11335 is unsuccessful continue in the association code. */
11336 goto bit_rotate;
11338 case BIT_XOR_EXPR:
11339 if (integer_zerop (arg1))
11340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11341 if (integer_all_onesp (arg1))
11342 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11343 if (operand_equal_p (arg0, arg1, 0))
11344 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11346 /* ~X ^ X is -1. */
11347 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11348 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11350 t1 = build_zero_cst (type);
11351 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11352 return omit_one_operand_loc (loc, type, t1, arg1);
11355 /* X ^ ~X is -1. */
11356 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11359 t1 = build_zero_cst (type);
11360 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11361 return omit_one_operand_loc (loc, type, t1, arg0);
11364 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11365 with a constant, and the two constants have no bits in common,
11366 we should treat this as a BIT_IOR_EXPR since this may produce more
11367 simplifications. */
11368 if (TREE_CODE (arg0) == BIT_AND_EXPR
11369 && TREE_CODE (arg1) == BIT_AND_EXPR
11370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11371 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11372 && integer_zerop (const_binop (BIT_AND_EXPR,
11373 TREE_OPERAND (arg0, 1),
11374 TREE_OPERAND (arg1, 1))))
11376 code = BIT_IOR_EXPR;
11377 goto bit_ior;
11381 /* (X | Y) ^ X -> Y & ~X. */
11381 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11382 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11384 tree t2 = TREE_OPERAND (arg0, 1);
11385 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11386 arg1);
11387 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11388 fold_convert_loc (loc, type, t2),
11389 fold_convert_loc (loc, type, t1));
11390 return t1;
11393 /* (Y | X) ^ X -> Y & ~X. */
11394 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11395 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11397 tree t2 = TREE_OPERAND (arg0, 0);
11398 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11399 arg1);
11400 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11401 fold_convert_loc (loc, type, t2),
11402 fold_convert_loc (loc, type, t1));
11403 return t1;
11406 /* X ^ (X | Y) -> Y & ~X. */
11407 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11408 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11410 tree t2 = TREE_OPERAND (arg1, 1);
11411 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11412 arg0);
11413 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_convert_loc (loc, type, t2),
11415 fold_convert_loc (loc, type, t1));
11416 return t1;
11419 /* X ^ (Y | X) -> Y & ~X. */
11420 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11421 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11423 tree t2 = TREE_OPERAND (arg1, 0);
11424 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11425 arg0);
11426 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11427 fold_convert_loc (loc, type, t2),
11428 fold_convert_loc (loc, type, t1));
11429 return t1;
11432 /* Convert ~X ^ ~Y to X ^ Y. */
11433 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11434 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11435 return fold_build2_loc (loc, code, type,
11436 fold_convert_loc (loc, type,
11437 TREE_OPERAND (arg0, 0)),
11438 fold_convert_loc (loc, type,
11439 TREE_OPERAND (arg1, 0)));
11441 /* Convert ~X ^ C to X ^ ~C. */
11442 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11443 && TREE_CODE (arg1) == INTEGER_CST)
11444 return fold_build2_loc (loc, code, type,
11445 fold_convert_loc (loc, type,
11446 TREE_OPERAND (arg0, 0)),
11447 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11449 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11450 if (TREE_CODE (arg0) == BIT_AND_EXPR
11451 && integer_onep (TREE_OPERAND (arg0, 1))
11452 && integer_onep (arg1))
11453 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11454 build_zero_cst (TREE_TYPE (arg0)));
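/* Added note (illustrative): (X & 1) ^ 1 is 1 exactly when the low
   bit of X is 0, which is precisely the test (X & 1) == 0. */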
11456 /* Fold (X & Y) ^ Y as ~X & Y. */
11457 if (TREE_CODE (arg0) == BIT_AND_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11460 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11461 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11462 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11463 fold_convert_loc (loc, type, arg1));
11465 /* Fold (X & Y) ^ X as ~Y & X. */
11466 if (TREE_CODE (arg0) == BIT_AND_EXPR
11467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11468 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11470 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11471 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11472 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11473 fold_convert_loc (loc, type, arg1));
11475 /* Fold X ^ (X & Y) as X & ~Y. */
11476 if (TREE_CODE (arg1) == BIT_AND_EXPR
11477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11479 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11480 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11481 fold_convert_loc (loc, type, arg0),
11482 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11484 /* Fold X ^ (Y & X) as ~Y & X. */
11485 if (TREE_CODE (arg1) == BIT_AND_EXPR
11486 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11487 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11489 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11490 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11491 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11492 fold_convert_loc (loc, type, arg0));
11495 /* See if this can be simplified into a rotate first. If that
11496 is unsuccessful continue in the association code. */
11497 goto bit_rotate;
11499 case BIT_AND_EXPR:
11500 if (integer_all_onesp (arg1))
11501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11502 if (integer_zerop (arg1))
11503 return omit_one_operand_loc (loc, type, arg1, arg0);
11504 if (operand_equal_p (arg0, arg1, 0))
11505 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11507 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11508 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11509 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11510 || (TREE_CODE (arg0) == EQ_EXPR
11511 && integer_zerop (TREE_OPERAND (arg0, 1))))
11512 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11513 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11515 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11516 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11517 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11518 || (TREE_CODE (arg1) == EQ_EXPR
11519 && integer_zerop (TREE_OPERAND (arg1, 1))))
11520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11521 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11523 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11524 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11525 && TREE_CODE (arg1) == INTEGER_CST
11526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11528 tree tmp1 = fold_convert_loc (loc, type, arg1);
11529 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11530 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11531 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11532 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11533 return
11534 fold_convert_loc (loc, type,
11535 fold_build2_loc (loc, BIT_IOR_EXPR,
11536 type, tmp2, tmp3));
11539 /* (X | Y) & Y is (X, Y). */
11540 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11541 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11542 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11543 /* (X | Y) & X is (Y, X). */
11544 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11545 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11546 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11547 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11548 /* X & (X | Y) is (Y, X). */
11549 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11550 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11551 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11552 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11553 /* X & (Y | X) is (Y, X). */
11554 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11555 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11556 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11557 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11559 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11560 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11561 && integer_onep (TREE_OPERAND (arg0, 1))
11562 && integer_onep (arg1))
11564 tree tem2;
11565 tem = TREE_OPERAND (arg0, 0);
11566 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11567 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11568 tem, tem2);
11569 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11570 build_zero_cst (TREE_TYPE (tem)));
11572 /* Fold ~X & 1 as (X & 1) == 0. */
11573 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11574 && integer_onep (arg1))
11576 tree tem2;
11577 tem = TREE_OPERAND (arg0, 0);
11578 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11579 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11580 tem, tem2);
11581 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11582 build_zero_cst (TREE_TYPE (tem)));
11584 /* Fold !X & 1 as X == 0. */
11585 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11586 && integer_onep (arg1))
11588 tem = TREE_OPERAND (arg0, 0);
11589 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11590 build_zero_cst (TREE_TYPE (tem)));
11593 /* Fold (X ^ Y) & Y as ~X & Y. */
11594 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11595 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11597 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11598 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11599 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11600 fold_convert_loc (loc, type, arg1));
11602 /* Fold (X ^ Y) & X as ~Y & X. */
11603 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11605 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11607 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11608 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11609 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11610 fold_convert_loc (loc, type, arg1));
11612 /* Fold X & (X ^ Y) as X & ~Y. */
11613 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11614 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11616 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11617 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11618 fold_convert_loc (loc, type, arg0),
11619 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11621 /* Fold X & (Y ^ X) as ~Y & X. */
11622 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11624 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11626 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11627 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11628 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11629 fold_convert_loc (loc, type, arg0));
11632 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11633 multiple of 1 << CST. */
11634 if (TREE_CODE (arg1) == INTEGER_CST)
11636 double_int cst1 = tree_to_double_int (arg1);
11637 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11638 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11639 if ((cst1 & ncst1) == ncst1
11640 && multiple_of_p (type, arg0,
11641 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11642 return fold_convert_loc (loc, type, arg0);
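/* Worked example (added illustration): with CST = 4, -(1 << 4) is
   the two's-complement mask ~0xF; if Y == 16 then X * 16 already has
   its four low bits clear, so (X * 16) & -(1 << 4) folds to X * 16. */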
11645 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11646 bits from CST2. */
11647 if (TREE_CODE (arg1) == INTEGER_CST
11648 && TREE_CODE (arg0) == MULT_EXPR
11649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11651 double_int darg1 = tree_to_double_int (arg1);
11652 double_int masked
11653 = mask_with_tz (type, darg1,
11654 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11656 if (masked.is_zero ())
11657 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11658 arg0, arg1);
11659 else if (masked != darg1)
11661 /* Avoid the transform if arg1 is a mask of some
11662 mode which allows further optimizations. */
11663 int pop = darg1.popcount ();
11664 if (!(pop >= BITS_PER_UNIT
11665 && exact_log2 (pop) != -1
11666 && double_int::mask (pop) == darg1))
11667 return fold_build2_loc (loc, code, type, op0,
11668 double_int_to_tree (type, masked));
11672 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11673 ((A & N) + B) & M -> (A + B) & M
11674 Similarly if (N & M) == 0,
11675 ((A | N) + B) & M -> (A + B) & M
11676 and for - instead of + (or unary - instead of +)
11677 and/or ^ instead of |.
11678 If B is constant and (B & M) == 0, fold into A & M. */
11679 if (host_integerp (arg1, 1))
11681 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11682 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11683 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11684 && (TREE_CODE (arg0) == PLUS_EXPR
11685 || TREE_CODE (arg0) == MINUS_EXPR
11686 || TREE_CODE (arg0) == NEGATE_EXPR)
11687 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11688 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11690 tree pmop[2];
11691 int which = 0;
11692 unsigned HOST_WIDE_INT cst0;
11694 /* Now we know that arg0 is (C + D) or (C - D) or
11695 -C and arg1 (M) is == (1LL << cst) - 1.
11696 Store C into PMOP[0] and D into PMOP[1]. */
11697 pmop[0] = TREE_OPERAND (arg0, 0);
11698 pmop[1] = NULL;
11699 if (TREE_CODE (arg0) != NEGATE_EXPR)
11701 pmop[1] = TREE_OPERAND (arg0, 1);
11702 which = 1;
11705 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11706 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11707 & cst1) != cst1)
11708 which = -1;
11710 for (; which >= 0; which--)
11711 switch (TREE_CODE (pmop[which]))
11713 case BIT_AND_EXPR:
11714 case BIT_IOR_EXPR:
11715 case BIT_XOR_EXPR:
11716 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11717 != INTEGER_CST)
11718 break;
11719 /* tree_low_cst not used, because we don't care about
11720 the upper bits. */
11721 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11722 cst0 &= cst1;
11723 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11725 if (cst0 != cst1)
11726 break;
11728 else if (cst0 != 0)
11729 break;
11730 /* If C or D is of the form (A & N) where
11731 (N & M) == M, or of the form (A | N) or
11732 (A ^ N) where (N & M) == 0, replace it with A. */
11733 pmop[which] = TREE_OPERAND (pmop[which], 0);
11734 break;
11735 case INTEGER_CST:
11736 /* If C or D is an N where (N & M) == 0, it can be
11737 omitted (assumed 0). */
11738 if ((TREE_CODE (arg0) == PLUS_EXPR
11739 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11740 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11741 pmop[which] = NULL;
11742 break;
11743 default:
11744 break;
11747 /* Only build anything new if we optimized one or both arguments
11748 above. */
11749 if (pmop[0] != TREE_OPERAND (arg0, 0)
11750 || (TREE_CODE (arg0) != NEGATE_EXPR
11751 && pmop[1] != TREE_OPERAND (arg0, 1)))
11753 tree utype = TREE_TYPE (arg0);
11754 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11756 /* Perform the operations in a type that has defined
11757 overflow behavior. */
11758 utype = unsigned_type_for (TREE_TYPE (arg0));
11759 if (pmop[0] != NULL)
11760 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11761 if (pmop[1] != NULL)
11762 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11765 if (TREE_CODE (arg0) == NEGATE_EXPR)
11766 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11767 else if (TREE_CODE (arg0) == PLUS_EXPR)
11769 if (pmop[0] != NULL && pmop[1] != NULL)
11770 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11771 pmop[0], pmop[1]);
11772 else if (pmop[0] != NULL)
11773 tem = pmop[0];
11774 else if (pmop[1] != NULL)
11775 tem = pmop[1];
11776 else
11777 return build_int_cst (type, 0);
11779 else if (pmop[0] == NULL)
11780 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11781 else
11782 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11783 pmop[0], pmop[1]);
11784 /* TEM is now the new binary +, - or unary - replacement. */
11785 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11786 fold_convert_loc (loc, utype, arg1));
11787 return fold_convert_loc (loc, type, tem);
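/* Worked example (added illustration): with M == 0xFF,
   ((A & 0xFF) + B) & 0xFF folds to (A + B) & 0xFF: bits of A above
   the mask cannot influence the low 8 bits of the sum, because
   carries only propagate upward. */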
11792 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11793 if (t1 != NULL_TREE)
11794 return t1;
11795 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11796 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11797 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11799 unsigned int prec
11800 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11802 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11803 && (~TREE_INT_CST_LOW (arg1)
11804 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11805 return
11806 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11809 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11811 This results in more efficient code for machines without a NOR
11812 instruction. Combine will canonicalize to the first form
11813 which will allow use of NOR instructions provided by the
11814 backend if they exist. */
11815 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11816 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11818 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11819 build2 (BIT_IOR_EXPR, type,
11820 fold_convert_loc (loc, type,
11821 TREE_OPERAND (arg0, 0)),
11822 fold_convert_loc (loc, type,
11823 TREE_OPERAND (arg1, 0))));
11826 /* If arg0 is derived from the address of an object or function, we may
11827 be able to fold this expression using the object or function's
11828 alignment. */
11829 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11831 unsigned HOST_WIDE_INT modulus, residue;
11832 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11834 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11835 integer_onep (arg1));
11837 /* This works because modulus is a power of 2. If this weren't the
11838 case, we'd have to replace it by its greatest power-of-2
11839 divisor: modulus & -modulus. */
11840 if (low < modulus)
11841 return build_int_cst (type, residue & low);
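/* Worked example (added illustration): if arg0 is the address of an
   object known to be 8-byte aligned, modulus == 8 and residue == 0,
   so for arg1 == 7 the expression ptr & 7 folds to the constant 0. */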
11844 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11845 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11846 if the new mask might be further optimized. */
11847 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11848 || TREE_CODE (arg0) == RSHIFT_EXPR)
11849 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11850 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11851 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11852 < TYPE_PRECISION (TREE_TYPE (arg0))
11853 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11854 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11856 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11857 unsigned HOST_WIDE_INT mask
11858 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11859 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11860 tree shift_type = TREE_TYPE (arg0);
11862 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11863 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11864 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11865 && TYPE_PRECISION (TREE_TYPE (arg0))
11866 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11868 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11869 tree arg00 = TREE_OPERAND (arg0, 0);
11870 /* See if more bits can be proven as zero because of
11871 zero extension. */
11872 if (TREE_CODE (arg00) == NOP_EXPR
11873 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11875 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11876 if (TYPE_PRECISION (inner_type)
11877 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11878 && TYPE_PRECISION (inner_type) < prec)
11880 prec = TYPE_PRECISION (inner_type);
11881 /* See if we can shorten the right shift. */
11882 if (shiftc < prec)
11883 shift_type = inner_type;
11886 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11887 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11888 zerobits <<= prec - shiftc;
11889 /* For an arithmetic shift, if the sign bit could be set, zerobits
11890 can actually contain sign bits, so no transformation is
11891 possible unless MASK masks them all away. In that
11892 case the shift needs to be converted into a logical shift. */
11893 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11894 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11896 if ((mask & zerobits) == 0)
11897 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11898 else
11899 zerobits = 0;
11903 /* ((X << 16) & 0xff00) is (X, 0). */
11904 if ((mask & zerobits) == mask)
11905 return omit_one_operand_loc (loc, type,
11906 build_int_cst (type, 0), arg0);
11908 newmask = mask | zerobits;
11909 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11911 unsigned int prec;
11913 /* Only do the transformation if NEWMASK is some integer
11914 mode's mask. */
11915 for (prec = BITS_PER_UNIT;
11916 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11917 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11918 break;
11919 if (prec < HOST_BITS_PER_WIDE_INT
11920 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11922 tree newmaskt;
11924 if (shift_type != TREE_TYPE (arg0))
11926 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11927 fold_convert_loc (loc, shift_type,
11928 TREE_OPERAND (arg0, 0)),
11929 TREE_OPERAND (arg0, 1));
11930 tem = fold_convert_loc (loc, type, tem);
11932 else
11933 tem = op0;
11934 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11935 if (!tree_int_cst_equal (newmaskt, arg1))
11936 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
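/* Worked example (added illustration): in (X << 2) & 0xFC the shift
   already zeroes the two low bits, so zerobits == 0x3 and
   newmask == 0xFF, the QImode mask; rewriting the AND with 0xFF is
   equivalent and may enable further narrowing optimizations. */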
11941 goto associate;
11943 case RDIV_EXPR:
11944 /* Don't touch a floating-point divide by zero unless the mode
11945 of the constant can represent infinity. */
11946 if (TREE_CODE (arg1) == REAL_CST
11947 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11948 && real_zerop (arg1))
11949 return NULL_TREE;
11951 /* Optimize A / A to 1.0 if we don't care about
11952 NaNs or Infinities. Skip the transformation
11953 for non-real operands. */
11954 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11955 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11956 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11957 && operand_equal_p (arg0, arg1, 0))
11959 tree r = build_real (TREE_TYPE (arg0), dconst1);
11961 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11964 /* The complex version of the above A / A optimization. */
11965 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11966 && operand_equal_p (arg0, arg1, 0))
11968 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11969 if (! HONOR_NANS (TYPE_MODE (elem_type))
11970 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11972 tree r = build_real (elem_type, dconst1);
11973 /* omit_two_operands will call fold_convert for us. */
11974 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11978 /* (-A) / (-B) -> A / B */
11979 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11980 return fold_build2_loc (loc, RDIV_EXPR, type,
11981 TREE_OPERAND (arg0, 0),
11982 negate_expr (arg1));
11983 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11984 return fold_build2_loc (loc, RDIV_EXPR, type,
11985 negate_expr (arg0),
11986 TREE_OPERAND (arg1, 0));
11988 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11989 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11990 && real_onep (arg1))
11991 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11993 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11994 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11995 && real_minus_onep (arg1))
11996 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11997 negate_expr (arg0)));
11999 /* If ARG1 is a constant, we can convert this to a multiply by the
12000 reciprocal. This does not have the same rounding properties,
12001 so only do this if -freciprocal-math. We can actually
12002 always safely do it if ARG1 is a power of two, but it's hard to
12003 tell if it is or not in a portable manner. */
12004 if (optimize
12005 && (TREE_CODE (arg1) == REAL_CST
12006 || (TREE_CODE (arg1) == COMPLEX_CST
12007 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12008 || (TREE_CODE (arg1) == VECTOR_CST
12009 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12011 if (flag_reciprocal_math
12012 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12013 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12014 /* Find the reciprocal if optimizing and the result is exact.
12015 TODO: Complex reciprocal not implemented. */
12016 if (TREE_CODE (arg1) != COMPLEX_CST)
12018 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12020 if (inverse)
12021 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
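/* Worked example (added illustration): X / 4.0 becomes X * 0.25 even
   without -freciprocal-math, because 4.0 is a power of two and its
   reciprocal 0.25 is exactly representable. */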
12024 /* Convert A/B/C to A/(B*C). */
12025 if (flag_reciprocal_math
12026 && TREE_CODE (arg0) == RDIV_EXPR)
12027 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12028 fold_build2_loc (loc, MULT_EXPR, type,
12029 TREE_OPERAND (arg0, 1), arg1));
12031 /* Convert A/(B/C) to (A/B)*C. */
12032 if (flag_reciprocal_math
12033 && TREE_CODE (arg1) == RDIV_EXPR)
12034 return fold_build2_loc (loc, MULT_EXPR, type,
12035 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12036 TREE_OPERAND (arg1, 0)),
12037 TREE_OPERAND (arg1, 1));
12039 /* Convert C1/(X*C2) into (C1/C2)/X. */
12040 if (flag_reciprocal_math
12041 && TREE_CODE (arg1) == MULT_EXPR
12042 && TREE_CODE (arg0) == REAL_CST
12043 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12045 tree tem = const_binop (RDIV_EXPR, arg0,
12046 TREE_OPERAND (arg1, 1));
12047 if (tem)
12048 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12049 TREE_OPERAND (arg1, 0));
12052 if (flag_unsafe_math_optimizations)
12054 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12055 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12057 /* Optimize sin(x)/cos(x) as tan(x). */
12058 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12059 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12060 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12061 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12062 CALL_EXPR_ARG (arg1, 0), 0))
12064 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12066 if (tanfn != NULL_TREE)
12067 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12070 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12071 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12072 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12073 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12074 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12075 CALL_EXPR_ARG (arg1, 0), 0))
12077 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12079 if (tanfn != NULL_TREE)
12081 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12082 CALL_EXPR_ARG (arg0, 0));
12083 return fold_build2_loc (loc, RDIV_EXPR, type,
12084 build_real (type, dconst1), tmp);
12088 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12089 NaNs or Infinities. */
12090 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12091 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12092 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12094 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12095 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12097 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12098 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12099 && operand_equal_p (arg00, arg01, 0))
12101 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12103 if (cosfn != NULL_TREE)
12104 return build_call_expr_loc (loc, cosfn, 1, arg00);
12108 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12109 NaNs or Infinities. */
12110 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12111 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12112 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12114 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12115 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12117 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12118 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12119 && operand_equal_p (arg00, arg01, 0))
12121 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12123 if (cosfn != NULL_TREE)
12125 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12126 return fold_build2_loc (loc, RDIV_EXPR, type,
12127 build_real (type, dconst1),
12128 tmp);
12133 /* Optimize pow(x,c)/x as pow(x,c-1). */
12134 if (fcode0 == BUILT_IN_POW
12135 || fcode0 == BUILT_IN_POWF
12136 || fcode0 == BUILT_IN_POWL)
12138 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12139 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12140 if (TREE_CODE (arg01) == REAL_CST
12141 && !TREE_OVERFLOW (arg01)
12142 && operand_equal_p (arg1, arg00, 0))
12144 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12145 REAL_VALUE_TYPE c;
12146 tree arg;
12148 c = TREE_REAL_CST (arg01);
12149 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12150 arg = build_real (type, c);
12151 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12155 /* Optimize a/root(b/c) into a*root(c/b). */
12156 if (BUILTIN_ROOT_P (fcode1))
12158 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12160 if (TREE_CODE (rootarg) == RDIV_EXPR)
12162 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12163 tree b = TREE_OPERAND (rootarg, 0);
12164 tree c = TREE_OPERAND (rootarg, 1);
12166 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12168 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12169 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12173 /* Optimize x/expN(y) into x*expN(-y). */
12174 if (BUILTIN_EXPONENT_P (fcode1))
12176 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12177 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12178 arg1 = build_call_expr_loc (loc,
12179 expfn, 1,
12180 fold_convert_loc (loc, type, arg));
12181 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12184 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12185 if (fcode1 == BUILT_IN_POW
12186 || fcode1 == BUILT_IN_POWF
12187 || fcode1 == BUILT_IN_POWL)
12189 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12190 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12191 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12192 tree neg11 = fold_convert_loc (loc, type,
12193 negate_expr (arg11));
12194 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12195 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12198 return NULL_TREE;
12200 case TRUNC_DIV_EXPR:
12201 /* Optimize (X & (-A)) / A where A is a power of 2,
12202 to X >> log2(A) */
12203 if (TREE_CODE (arg0) == BIT_AND_EXPR
12204 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12205 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12207 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12208 arg1, TREE_OPERAND (arg0, 1));
12209 if (sum && integer_zerop (sum)) {
12210 unsigned long pow2;
12212 if (TREE_INT_CST_LOW (arg1))
12213 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12214 else
12215 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12216 + HOST_BITS_PER_WIDE_INT;
12218 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12219 TREE_OPERAND (arg0, 0),
12220 build_int_cst (integer_type_node, pow2));
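/* Worked example (added illustration): (X & -8) / 8 becomes X >> 3;
   the AND clears the three low bits, so the truncating division is
   exact and matches the arithmetic right shift. */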
12224 /* Fall through */
12226 case FLOOR_DIV_EXPR:
12227 /* Simplify A / (B << N) where A and B are positive and B is
12228 a power of 2, to A >> (N + log2(B)). */
12229 strict_overflow_p = false;
12230 if (TREE_CODE (arg1) == LSHIFT_EXPR
12231 && (TYPE_UNSIGNED (type)
12232 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12234 tree sval = TREE_OPERAND (arg1, 0);
12235 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12237 tree sh_cnt = TREE_OPERAND (arg1, 1);
12238 unsigned long pow2;
12240 if (TREE_INT_CST_LOW (sval))
12241 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12242 else
12243 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12244 + HOST_BITS_PER_WIDE_INT;
12246 if (strict_overflow_p)
12247 fold_overflow_warning (("assuming signed overflow does not "
12248 "occur when simplifying A / (B << N)"),
12249 WARN_STRICT_OVERFLOW_MISC);
12251 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12252 sh_cnt,
12253 build_int_cst (TREE_TYPE (sh_cnt),
12254 pow2));
12255 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12256 fold_convert_loc (loc, type, arg0), sh_cnt);
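/* Editorial example (not part of the original source): for
   unsigned a,

       a / (4U << n)   ==>   a >> (n + 2)

   since dividing by 4 << n is a right shift by n + log2 (4).  */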
12260 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12261 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12262 if (INTEGRAL_TYPE_P (type)
12263 && TYPE_UNSIGNED (type)
12264 && code == FLOOR_DIV_EXPR)
12265 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12267 /* Fall through */
12269 case ROUND_DIV_EXPR:
12270 case CEIL_DIV_EXPR:
12271 case EXACT_DIV_EXPR:
12272 if (integer_onep (arg1))
12273 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12274 if (integer_zerop (arg1))
12275 return NULL_TREE;
12276 /* X / -1 is -X. */
12277 if (!TYPE_UNSIGNED (type)
12278 && TREE_CODE (arg1) == INTEGER_CST
12279 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12280 && TREE_INT_CST_HIGH (arg1) == -1)
12281 return fold_convert_loc (loc, type, negate_expr (arg0));
12283 /* Convert -A / -B to A / B when the type is signed and overflow is
12284 undefined. */
12285 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12286 && TREE_CODE (arg0) == NEGATE_EXPR
12287 && negate_expr_p (arg1))
12289 if (INTEGRAL_TYPE_P (type))
12290 fold_overflow_warning (("assuming signed overflow does not occur "
12291 "when distributing negation across "
12292 "division"),
12293 WARN_STRICT_OVERFLOW_MISC);
12294 return fold_build2_loc (loc, code, type,
12295 fold_convert_loc (loc, type,
12296 TREE_OPERAND (arg0, 0)),
12297 fold_convert_loc (loc, type,
12298 negate_expr (arg1)));
12300 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12301 && TREE_CODE (arg1) == NEGATE_EXPR
12302 && negate_expr_p (arg0))
12304 if (INTEGRAL_TYPE_P (type))
12305 fold_overflow_warning (("assuming signed overflow does not occur "
12306 "when distributing negation across "
12307 "division"),
12308 WARN_STRICT_OVERFLOW_MISC);
12309 return fold_build2_loc (loc, code, type,
12310 fold_convert_loc (loc, type,
12311 negate_expr (arg0)),
12312 fold_convert_loc (loc, type,
12313 TREE_OPERAND (arg1, 0)));
12316 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12317 operation, EXACT_DIV_EXPR.
12319 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12320 At one time others generated faster code, but it's not clear whether they
12321 still do after the last round of changes to the DIV code in expmed.c. */
12322 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12323 && multiple_of_p (type, arg0, arg1))
12324 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12326 strict_overflow_p = false;
12327 if (TREE_CODE (arg1) == INTEGER_CST
12328 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12329 &strict_overflow_p)))
12331 if (strict_overflow_p)
12332 fold_overflow_warning (("assuming signed overflow does not occur "
12333 "when simplifying division"),
12334 WARN_STRICT_OVERFLOW_MISC);
12335 return fold_convert_loc (loc, type, tem);
12338 return NULL_TREE;
12340 case CEIL_MOD_EXPR:
12341 case FLOOR_MOD_EXPR:
12342 case ROUND_MOD_EXPR:
12343 case TRUNC_MOD_EXPR:
12344 /* X % 1 is always zero, but be sure to preserve any side
12345 effects in X. */
12346 if (integer_onep (arg1))
12347 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12349 /* For X % 0, return the expression unchanged so that we can emit the
12350 proper warnings and errors. */
12351 if (integer_zerop (arg1))
12352 return NULL_TREE;
12354 /* 0 % X is always zero, but be sure to preserve any side
12355 effects in X. Place this after checking for X == 0. */
12356 if (integer_zerop (arg0))
12357 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12359 /* X % -1 is zero. */
12360 if (!TYPE_UNSIGNED (type)
12361 && TREE_CODE (arg1) == INTEGER_CST
12362 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12363 && TREE_INT_CST_HIGH (arg1) == -1)
12364 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12366 /* X % -C is the same as X % C. */
12367 if (code == TRUNC_MOD_EXPR
12368 && !TYPE_UNSIGNED (type)
12369 && TREE_CODE (arg1) == INTEGER_CST
12370 && !TREE_OVERFLOW (arg1)
12371 && TREE_INT_CST_HIGH (arg1) < 0
12372 && !TYPE_OVERFLOW_TRAPS (type)
12373 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12374 && !sign_bit_p (arg1, arg1))
12375 return fold_build2_loc (loc, code, type,
12376 fold_convert_loc (loc, type, arg0),
12377 fold_convert_loc (loc, type,
12378 negate_expr (arg1)));
12380 /* X % -Y is the same as X % Y. */
12381 if (code == TRUNC_MOD_EXPR
12382 && !TYPE_UNSIGNED (type)
12383 && TREE_CODE (arg1) == NEGATE_EXPR
12384 && !TYPE_OVERFLOW_TRAPS (type))
12385 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12386 fold_convert_loc (loc, type,
12387 TREE_OPERAND (arg1, 0)));
12389 strict_overflow_p = false;
12390 if (TREE_CODE (arg1) == INTEGER_CST
12391 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12392 &strict_overflow_p)))
12394 if (strict_overflow_p)
12395 fold_overflow_warning (("assuming signed overflow does not occur "
12396 "when simplifying modulus"),
12397 WARN_STRICT_OVERFLOW_MISC);
12398 return fold_convert_loc (loc, type, tem);
12401 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12402 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12403 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12404 && (TYPE_UNSIGNED (type)
12405 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12407 tree c = arg1;
12408 /* Also optimize A % (C << N) where C is a power of 2,
12409 to A & ((C << N) - 1). */
12410 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12411 c = TREE_OPERAND (arg1, 0);
12413 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12415 tree mask
12416 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12417 build_int_cst (TREE_TYPE (arg1), 1));
12418 if (strict_overflow_p)
12419 fold_overflow_warning (("assuming signed overflow does not "
12420 "occur when simplifying "
12421 "X % (power of two)"),
12422 WARN_STRICT_OVERFLOW_MISC);
12423 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12424 fold_convert_loc (loc, type, arg0),
12425 fold_convert_loc (loc, type, mask));
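/* Editorial example (not part of the original source): for
   unsigned x,

       x % 8           ==>   x & 7
       x % (2U << n)   ==>   x & ((2U << n) - 1)

   the second line being the LSHIFT_EXPR variant handled above.  */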
12429 return NULL_TREE;
12431 case LROTATE_EXPR:
12432 case RROTATE_EXPR:
12433 if (integer_all_onesp (arg0))
12434 return omit_one_operand_loc (loc, type, arg0, arg1);
12435 goto shift;
12437 case RSHIFT_EXPR:
12438 /* Optimize -1 >> x for arithmetic right shifts. */
12439 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12440 && tree_expr_nonnegative_p (arg1))
12441 return omit_one_operand_loc (loc, type, arg0, arg1);
12442 /* ... fall through ... */
12444 case LSHIFT_EXPR:
12445 shift:
12446 if (integer_zerop (arg1))
12447 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12448 if (integer_zerop (arg0))
12449 return omit_one_operand_loc (loc, type, arg0, arg1);
12451 /* Since a negative shift count is not well-defined,
12452 don't try to compute it in the compiler. */
12453 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12454 return NULL_TREE;
12456 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12457 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12458 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12459 && host_integerp (TREE_OPERAND (arg0, 1), false)
12460 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12462 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12463 + TREE_INT_CST_LOW (arg1));
12465 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12466 being well defined. */
12467 if (low >= TYPE_PRECISION (type))
12469 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12470 low = low % TYPE_PRECISION (type);
12471 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12472 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12473 TREE_OPERAND (arg0, 0));
12474 else
12475 low = TYPE_PRECISION (type) - 1;
12478 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12479 build_int_cst (type, low));
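/* Editorial example (not part of the original source): on a 32-bit
   type,

       (x << 10) << 12   ==>   x << 22

   while (x << 20) << 12 reaches the precision and folds to 0 via
   the overflow branch above.  */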
12482 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12483 into x & ((unsigned)-1 >> c) for unsigned types. */
12484 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12485 || (TYPE_UNSIGNED (type)
12486 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12487 && host_integerp (arg1, false)
12488 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12489 && host_integerp (TREE_OPERAND (arg0, 1), false)
12490 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12492 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12493 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12494 tree lshift;
12495 tree arg00;
12497 if (low0 == low1)
12499 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12501 lshift = build_int_cst (type, -1);
12502 lshift = int_const_binop (code, lshift, arg1);
12504 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
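/* Editorial example (not part of the original source): for signed
   int x,

       (x >> 4) << 4   ==>   x & -16

   since -1 << 4 is -16; both forms clear the low four bits.  */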
12508 /* Rewrite an LROTATE_EXPR by a constant into an
12509 RROTATE_EXPR by a new constant. */
12510 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12512 tree tem = build_int_cst (TREE_TYPE (arg1),
12513 TYPE_PRECISION (type));
12514 tem = const_binop (MINUS_EXPR, tem, arg1);
12515 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
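/* Editorial example (not part of the original source): on a 32-bit
   type, a rotate left by 8 becomes a rotate right by 32 - 8 == 24;
   the two are bit-for-bit identical.  */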
12518 /* If we have a rotate of a bit operation with the rotate count and
12519 the second operand of the bit operation both constant,
12520 permute the two operations. */
12521 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12522 && (TREE_CODE (arg0) == BIT_AND_EXPR
12523 || TREE_CODE (arg0) == BIT_IOR_EXPR
12524 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12526 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12527 fold_build2_loc (loc, code, type,
12528 TREE_OPERAND (arg0, 0), arg1),
12529 fold_build2_loc (loc, code, type,
12530 TREE_OPERAND (arg0, 1), arg1));
12532 /* Two consecutive rotates adding up to the precision of the
12533 type can be ignored. */
12534 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12535 && TREE_CODE (arg0) == RROTATE_EXPR
12536 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12537 && TREE_INT_CST_HIGH (arg1) == 0
12538 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12539 && ((TREE_INT_CST_LOW (arg1)
12540 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12541 == (unsigned int) TYPE_PRECISION (type)))
12542 return TREE_OPERAND (arg0, 0);
12544 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12545 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12546 if the latter can be further optimized. */
12547 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12548 && TREE_CODE (arg0) == BIT_AND_EXPR
12549 && TREE_CODE (arg1) == INTEGER_CST
12550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12552 tree mask = fold_build2_loc (loc, code, type,
12553 fold_convert_loc (loc, type,
12554 TREE_OPERAND (arg0, 1)),
12555 arg1);
12556 tree shift = fold_build2_loc (loc, code, type,
12557 fold_convert_loc (loc, type,
12558 TREE_OPERAND (arg0, 0)),
12559 arg1);
12560 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12561 if (tem)
12562 return tem;
12565 return NULL_TREE;
12567 case MIN_EXPR:
12568 if (operand_equal_p (arg0, arg1, 0))
12569 return omit_one_operand_loc (loc, type, arg0, arg1);
12570 if (INTEGRAL_TYPE_P (type)
12571 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12572 return omit_one_operand_loc (loc, type, arg1, arg0);
12573 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12574 if (tem)
12575 return tem;
12576 goto associate;
12578 case MAX_EXPR:
12579 if (operand_equal_p (arg0, arg1, 0))
12580 return omit_one_operand_loc (loc, type, arg0, arg1);
12581 if (INTEGRAL_TYPE_P (type)
12582 && TYPE_MAX_VALUE (type)
12583 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12584 return omit_one_operand_loc (loc, type, arg1, arg0);
12585 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12586 if (tem)
12587 return tem;
12588 goto associate;
12590 case TRUTH_ANDIF_EXPR:
12591 /* Note that the operands of this must be ints
12592 and their values must be 0 or 1.
12593 ("true" is a fixed value perhaps depending on the language.) */
12594 /* If first arg is constant zero, return it. */
12595 if (integer_zerop (arg0))
12596 return fold_convert_loc (loc, type, arg0);
12597 case TRUTH_AND_EXPR:
12598 /* If either arg is constant true, drop it. */
12599 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12600 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12601 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12602 /* Preserve sequence points. */
12603 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12604 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12605 /* If second arg is constant zero, result is zero, but first arg
12606 must be evaluated. */
12607 if (integer_zerop (arg1))
12608 return omit_one_operand_loc (loc, type, arg1, arg0);
12609 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12610 case will be handled here. */
12611 if (integer_zerop (arg0))
12612 return omit_one_operand_loc (loc, type, arg0, arg1);
12614 /* !X && X is always false. */
12615 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12616 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12617 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12618 /* X && !X is always false. */
12619 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12620 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12621 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12623 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12624 means A >= Y && A != MAX, but in this case we know that
12625 A < X <= MAX. */
12627 if (!TREE_SIDE_EFFECTS (arg0)
12628 && !TREE_SIDE_EFFECTS (arg1))
12630 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12631 if (tem && !operand_equal_p (tem, arg0, 0))
12632 return fold_build2_loc (loc, code, type, tem, arg1);
12634 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12635 if (tem && !operand_equal_p (tem, arg1, 0))
12636 return fold_build2_loc (loc, code, type, arg0, tem);
12639 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12640 != NULL_TREE)
12641 return tem;
12643 return NULL_TREE;
12645 case TRUTH_ORIF_EXPR:
12646 /* Note that the operands of this must be ints
12647 and their values must be 0 or true.
12648 ("true" is a fixed value perhaps depending on the language.) */
12649 /* If first arg is constant true, return it. */
12650 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12651 return fold_convert_loc (loc, type, arg0);
12652 case TRUTH_OR_EXPR:
12653 /* If either arg is constant zero, drop it. */
12654 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12655 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12656 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12657 /* Preserve sequence points. */
12658 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12659 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12660 /* If second arg is constant true, result is true, but we must
12661 evaluate first arg. */
12662 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12663 return omit_one_operand_loc (loc, type, arg1, arg0);
12664 /* Likewise for first arg, but note this only occurs here for
12665 TRUTH_OR_EXPR. */
12666 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12667 return omit_one_operand_loc (loc, type, arg0, arg1);
12669 /* !X || X is always true. */
12670 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12671 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12672 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12673 /* X || !X is always true. */
12674 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12676 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12678 /* (X && !Y) || (!X && Y) is X ^ Y */
12679 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12680 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12682 tree a0, a1, l0, l1, n0, n1;
12684 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12685 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12687 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12688 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12690 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12691 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12693 if ((operand_equal_p (n0, a0, 0)
12694 && operand_equal_p (n1, a1, 0))
12695 || (operand_equal_p (n0, a1, 0)
12696 && operand_equal_p (n1, a0, 0)))
12697 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12700 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12701 != NULL_TREE)
12702 return tem;
12704 return NULL_TREE;
12706 case TRUTH_XOR_EXPR:
12707 /* If the second arg is constant zero, drop it. */
12708 if (integer_zerop (arg1))
12709 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12710 /* If the second arg is constant true, this is a logical inversion. */
12711 if (integer_onep (arg1))
12713 /* Only call invert_truthvalue if operand is a truth value. */
12714 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12715 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12716 else
12717 tem = invert_truthvalue_loc (loc, arg0);
12718 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12720 /* Identical arguments cancel to zero. */
12721 if (operand_equal_p (arg0, arg1, 0))
12722 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12724 /* !X ^ X is always true. */
12725 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12726 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12727 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12729 /* X ^ !X is always true. */
12730 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12731 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12732 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12734 return NULL_TREE;
12736 case EQ_EXPR:
12737 case NE_EXPR:
12738 STRIP_NOPS (arg0);
12739 STRIP_NOPS (arg1);
12741 tem = fold_comparison (loc, code, type, op0, op1);
12742 if (tem != NULL_TREE)
12743 return tem;
12745 /* bool_var != 0 becomes bool_var. */
12746 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12747 && code == NE_EXPR)
12748 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12750 /* bool_var == 1 becomes bool_var. */
12751 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12752 && code == EQ_EXPR)
12753 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12755 /* bool_var != 1 becomes !bool_var. */
12756 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12757 && code == NE_EXPR)
12758 return fold_convert_loc (loc, type,
12759 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12760 TREE_TYPE (arg0), arg0));
12762 /* bool_var == 0 becomes !bool_var. */
12763 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12764 && code == EQ_EXPR)
12765 return fold_convert_loc (loc, type,
12766 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12767 TREE_TYPE (arg0), arg0));
12769 /* !exp != 0 becomes !exp */
12770 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12771 && code == NE_EXPR)
12772 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12774 /* If this is an equality comparison of the address of two non-weak,
12775 unaliased symbols neither of which are extern (since we do not
12776 have access to attributes for externs), then we know the result. */
12777 if (TREE_CODE (arg0) == ADDR_EXPR
12778 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12779 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12780 && ! lookup_attribute ("alias",
12781 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12782 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12783 && TREE_CODE (arg1) == ADDR_EXPR
12784 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12785 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12786 && ! lookup_attribute ("alias",
12787 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12788 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12790 /* We know that we're looking at the address of two
12791 non-weak, unaliased, static _DECL nodes.
12793 It is both wasteful and incorrect to call operand_equal_p
12794 to compare the two ADDR_EXPR nodes. It is wasteful in that
12795 all we need to do is test pointer equality for the arguments
12796 to the two ADDR_EXPR nodes. It is incorrect to use
12797 operand_equal_p as that function is NOT equivalent to a
12798 C equality test. It can in fact return false for two
12799 objects which would test as equal using the C equality
12800 operator. */
12801 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12802 return constant_boolean_node (equal
12803 ? code == EQ_EXPR : code != EQ_EXPR,
12804 type);
12807 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12808 a MINUS_EXPR of a constant, we can convert it into a comparison with
12809 a revised constant as long as no overflow occurs. */
12810 if (TREE_CODE (arg1) == INTEGER_CST
12811 && (TREE_CODE (arg0) == PLUS_EXPR
12812 || TREE_CODE (arg0) == MINUS_EXPR)
12813 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12814 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12815 ? MINUS_EXPR : PLUS_EXPR,
12816 fold_convert_loc (loc, TREE_TYPE (arg0),
12817 arg1),
12818 TREE_OPERAND (arg0, 1)))
12819 && !TREE_OVERFLOW (tem))
12820 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
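/* Editorial example (not part of the original source): for int x,

       x + 5 == 7   ==>   x == 2

   where 2 is 7 - 5, computed by const_binop above; the fold is
   abandoned if that compile-time subtraction overflows.  */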
12822 /* Similarly for a NEGATE_EXPR. */
12823 if (TREE_CODE (arg0) == NEGATE_EXPR
12824 && TREE_CODE (arg1) == INTEGER_CST
12825 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12826 arg1)))
12827 && TREE_CODE (tem) == INTEGER_CST
12828 && !TREE_OVERFLOW (tem))
12829 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12831 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12832 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12833 && TREE_CODE (arg1) == INTEGER_CST
12834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12835 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12836 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12837 fold_convert_loc (loc,
12838 TREE_TYPE (arg0),
12839 arg1),
12840 TREE_OPERAND (arg0, 1)));
12842 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12843 if ((TREE_CODE (arg0) == PLUS_EXPR
12844 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12845 || TREE_CODE (arg0) == MINUS_EXPR)
12846 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12847 0)),
12848 arg1, 0)
12849 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12850 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12852 tree val = TREE_OPERAND (arg0, 1);
12853 return omit_two_operands_loc (loc, type,
12854 fold_build2_loc (loc, code, type,
12855 val,
12856 build_int_cst (TREE_TYPE (val),
12857 0)),
12858 TREE_OPERAND (arg0, 0), arg1);
12861 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12862 if (TREE_CODE (arg0) == MINUS_EXPR
12863 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12864 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12865 1)),
12866 arg1, 0)
12867 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12869 return omit_two_operands_loc (loc, type,
12870 code == NE_EXPR
12871 ? boolean_true_node : boolean_false_node,
12872 TREE_OPERAND (arg0, 1), arg1);
12875 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12876 for !=. Don't do this for ordered comparisons due to overflow. */
12877 if (TREE_CODE (arg0) == MINUS_EXPR
12878 && integer_zerop (arg1))
12879 return fold_build2_loc (loc, code, type,
12880 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12882 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12883 if (TREE_CODE (arg0) == ABS_EXPR
12884 && (integer_zerop (arg1) || real_zerop (arg1)))
12885 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12887 /* If this is an EQ or NE comparison with zero and ARG0 is
12888 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12889 two operations, but the latter can be done in one less insn
12890 on machines that have only two-operand insns or on which a
12891 constant cannot be the first operand. */
12892 if (TREE_CODE (arg0) == BIT_AND_EXPR
12893 && integer_zerop (arg1))
12895 tree arg00 = TREE_OPERAND (arg0, 0);
12896 tree arg01 = TREE_OPERAND (arg0, 1);
12897 if (TREE_CODE (arg00) == LSHIFT_EXPR
12898 && integer_onep (TREE_OPERAND (arg00, 0)))
12900 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12901 arg01, TREE_OPERAND (arg00, 1));
12902 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12903 build_int_cst (TREE_TYPE (arg0), 1));
12904 return fold_build2_loc (loc, code, type,
12905 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12906 arg1);
12908 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12909 && integer_onep (TREE_OPERAND (arg01, 0)))
12911 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12912 arg00, TREE_OPERAND (arg01, 1));
12913 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12914 build_int_cst (TREE_TYPE (arg0), 1));
12915 return fold_build2_loc (loc, code, type,
12916 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12917 arg1);
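/* Editorial example (not part of the original source):

       ((1 << foo) & bar) != 0   ==>   ((bar >> foo) & 1) != 0

   both sides take one shift and one AND, but the right-hand form
   never shifts a constant, which (per the comment above) can save
   an instruction on two-operand machines.  */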
12921 /* If this is an NE or EQ comparison of zero against the result of a
12922 signed MOD operation whose second operand is a power of 2, make
12923 the MOD operation unsigned since it is simpler and equivalent. */
12924 if (integer_zerop (arg1)
12925 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12926 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12927 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12928 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12929 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12930 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12932 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12933 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12934 fold_convert_loc (loc, newtype,
12935 TREE_OPERAND (arg0, 0)),
12936 fold_convert_loc (loc, newtype,
12937 TREE_OPERAND (arg0, 1)));
12939 return fold_build2_loc (loc, code, type, newmod,
12940 fold_convert_loc (loc, newtype, arg1));
12943 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12944 C1 is a valid shift constant, and C2 is a power of two, i.e.
12945 a single bit. */
12946 if (TREE_CODE (arg0) == BIT_AND_EXPR
12947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12948 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12949 == INTEGER_CST
12950 && integer_pow2p (TREE_OPERAND (arg0, 1))
12951 && integer_zerop (arg1))
12953 tree itype = TREE_TYPE (arg0);
12954 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12955 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12957 /* Check for a valid shift count. */
12958 if (TREE_INT_CST_HIGH (arg001) == 0
12959 && TREE_INT_CST_LOW (arg001) < prec)
12961 tree arg01 = TREE_OPERAND (arg0, 1);
12962 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12963 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12964 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12965 can be rewritten as (X & (C2 << C1)) != 0. */
12966 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12968 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12969 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12970 return fold_build2_loc (loc, code, type, tem,
12971 fold_convert_loc (loc, itype, arg1));
12973 /* Otherwise, for signed (arithmetic) shifts,
12974 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12975 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12976 else if (!TYPE_UNSIGNED (itype))
12977 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12978 arg000, build_int_cst (itype, 0));
12979 /* Otherwise, for unsigned (logical) shifts,
12980 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12981 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12982 else
12983 return omit_one_operand_loc (loc, type,
12984 code == EQ_EXPR ? integer_one_node
12985 : integer_zero_node,
12986 arg000);
12990 /* If we have (A & C) == C where C is a power of 2, convert this into
12991 (A & C) != 0. Similarly for NE_EXPR. */
12992 if (TREE_CODE (arg0) == BIT_AND_EXPR
12993 && integer_pow2p (TREE_OPERAND (arg0, 1))
12994 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12995 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12996 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12997 integer_zero_node));
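/* Editorial example (not part of the original source): with C == 4,

       (a & 4) == 4   ==>   (a & 4) != 0

   valid only because a power of two has a single bit set, so the
   masked value is either 0 or exactly C.  */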
12999 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13000 bit, then fold the expression into A < 0 or A >= 0. */
13001 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13002 if (tem)
13003 return tem;
13005 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13006 Similarly for NE_EXPR. */
13007 if (TREE_CODE (arg0) == BIT_AND_EXPR
13008 && TREE_CODE (arg1) == INTEGER_CST
13009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13011 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13012 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13013 TREE_OPERAND (arg0, 1));
13014 tree dandnotc
13015 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13016 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13017 notc);
13018 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13019 if (integer_nonzerop (dandnotc))
13020 return omit_one_operand_loc (loc, type, rslt, arg0);
13023 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13024 Similarly for NE_EXPR. */
13025 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13026 && TREE_CODE (arg1) == INTEGER_CST
13027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13029 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13030 tree candnotd
13031 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13032 TREE_OPERAND (arg0, 1),
13033 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13034 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13035 if (integer_nonzerop (candnotd))
13036 return omit_one_operand_loc (loc, type, rslt, arg0);
13039 /* If this is a comparison of a field, we may be able to simplify it. */
13040 if ((TREE_CODE (arg0) == COMPONENT_REF
13041 || TREE_CODE (arg0) == BIT_FIELD_REF)
13042 /* Handle the constant case even without -O
13043 to make sure the warnings are given. */
13044 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13046 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13047 if (t1)
13048 return t1;
13051 /* Optimize comparisons of strlen vs zero to a compare of the
13052 first character of the string vs zero. To wit,
13053 strlen(ptr) == 0 => *ptr == 0
13054 strlen(ptr) != 0 => *ptr != 0
13055 Other cases should reduce to one of these two (or a constant)
13056 due to the return value of strlen being unsigned. */
13057 if (TREE_CODE (arg0) == CALL_EXPR
13058 && integer_zerop (arg1))
13060 tree fndecl = get_callee_fndecl (arg0);
13062 if (fndecl
13063 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13064 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13065 && call_expr_nargs (arg0) == 1
13066 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13068 tree iref = build_fold_indirect_ref_loc (loc,
13069 CALL_EXPR_ARG (arg0, 0));
13070 return fold_build2_loc (loc, code, type, iref,
13071 build_int_cst (TREE_TYPE (iref), 0));
13075 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13076 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13077 if (TREE_CODE (arg0) == RSHIFT_EXPR
13078 && integer_zerop (arg1)
13079 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13081 tree arg00 = TREE_OPERAND (arg0, 0);
13082 tree arg01 = TREE_OPERAND (arg0, 1);
13083 tree itype = TREE_TYPE (arg00);
13084 if (TREE_INT_CST_HIGH (arg01) == 0
13085 && !(TREE_CODE (itype) == COMPLEX_TYPE
13086 || TREE_CODE (itype) == VECTOR_TYPE)
13087 && TREE_INT_CST_LOW (arg01)
13088 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13090 if (TYPE_UNSIGNED (itype))
13092 itype = signed_type_for (itype);
13093 arg00 = fold_convert_loc (loc, itype, arg00);
13095 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13096 type, arg00, build_zero_cst (itype));
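/* Editorial example (not part of the original source): for 32-bit
   int x,

       (x >> 31) != 0   ==>   x < 0
       (x >> 31) == 0   ==>   x >= 0

   shifting right by precision - 1 leaves only the sign bit; an
   unsigned x is first converted to the signed type, as above.  */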
13100 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13101 if (integer_zerop (arg1)
13102 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13103 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13104 TREE_OPERAND (arg0, 1));
13106 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13107 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13108 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13109 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13110 build_zero_cst (TREE_TYPE (arg0)));
13111 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13112 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13113 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13114 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13116 build_zero_cst (TREE_TYPE (arg0)));
13118 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13119 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13120 && TREE_CODE (arg1) == INTEGER_CST
13121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13122 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13123 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13124 TREE_OPERAND (arg0, 1), arg1));
13126 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13127 (X & C) == 0 when C is a single bit. */
13128 if (TREE_CODE (arg0) == BIT_AND_EXPR
13129 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13130 && integer_zerop (arg1)
13131 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13133 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13134 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13135 TREE_OPERAND (arg0, 1));
13136 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13137 type, tem,
13138 fold_convert_loc (loc, TREE_TYPE (arg0),
13139 arg1));
13142 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13143 constant C is a power of two, i.e. a single bit. */
13144 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13146 && integer_zerop (arg1)
13147 && integer_pow2p (TREE_OPERAND (arg0, 1))
13148 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13149 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13151 tree arg00 = TREE_OPERAND (arg0, 0);
13152 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13153 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13156 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13157 when C is a power of two, i.e. a single bit. */
13158 if (TREE_CODE (arg0) == BIT_AND_EXPR
13159 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13160 && integer_zerop (arg1)
13161 && integer_pow2p (TREE_OPERAND (arg0, 1))
13162 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13163 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13165 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13166 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13167 arg000, TREE_OPERAND (arg0, 1));
13168 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13169 tem, build_int_cst (TREE_TYPE (tem), 0));
13172 if (integer_zerop (arg1)
13173 && tree_expr_nonzero_p (arg0))
13175 tree res = constant_boolean_node (code == NE_EXPR, type);
13176 return omit_one_operand_loc (loc, type, res, arg0);
13179 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13180 if (TREE_CODE (arg0) == NEGATE_EXPR
13181 && TREE_CODE (arg1) == NEGATE_EXPR)
13182 return fold_build2_loc (loc, code, type,
13183 TREE_OPERAND (arg0, 0),
13184 fold_convert_loc (loc, TREE_TYPE (arg0),
13185 TREE_OPERAND (arg1, 0)));
13187 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13188 if (TREE_CODE (arg0) == BIT_AND_EXPR
13189 && TREE_CODE (arg1) == BIT_AND_EXPR)
13191 tree arg00 = TREE_OPERAND (arg0, 0);
13192 tree arg01 = TREE_OPERAND (arg0, 1);
13193 tree arg10 = TREE_OPERAND (arg1, 0);
13194 tree arg11 = TREE_OPERAND (arg1, 1);
13195 tree itype = TREE_TYPE (arg0);
13197 if (operand_equal_p (arg01, arg11, 0))
13198 return fold_build2_loc (loc, code, type,
13199 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13200 fold_build2_loc (loc,
13201 BIT_XOR_EXPR, itype,
13202 arg00, arg10),
13203 arg01),
13204 build_zero_cst (itype));
13206 if (operand_equal_p (arg01, arg10, 0))
13207 return fold_build2_loc (loc, code, type,
13208 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13209 fold_build2_loc (loc,
13210 BIT_XOR_EXPR, itype,
13211 arg00, arg11),
13212 arg01),
13213 build_zero_cst (itype));
13215 if (operand_equal_p (arg00, arg11, 0))
13216 return fold_build2_loc (loc, code, type,
13217 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13218 fold_build2_loc (loc,
13219 BIT_XOR_EXPR, itype,
13220 arg01, arg10),
13221 arg00),
13222 build_zero_cst (itype));
13224 if (operand_equal_p (arg00, arg10, 0))
13225 return fold_build2_loc (loc, code, type,
13226 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13227 fold_build2_loc (loc,
13228 BIT_XOR_EXPR, itype,
13229 arg01, arg11),
13230 arg00),
13231 build_zero_cst (itype));
13234 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13235 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13237 tree arg00 = TREE_OPERAND (arg0, 0);
13238 tree arg01 = TREE_OPERAND (arg0, 1);
13239 tree arg10 = TREE_OPERAND (arg1, 0);
13240 tree arg11 = TREE_OPERAND (arg1, 1);
13241 tree itype = TREE_TYPE (arg0);
13243 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13244 operand_equal_p guarantees no side-effects so we don't need
13245 to use omit_one_operand on Z. */
13246 if (operand_equal_p (arg01, arg11, 0))
13247 return fold_build2_loc (loc, code, type, arg00,
13248 fold_convert_loc (loc, TREE_TYPE (arg00),
13249 arg10));
13250 if (operand_equal_p (arg01, arg10, 0))
13251 return fold_build2_loc (loc, code, type, arg00,
13252 fold_convert_loc (loc, TREE_TYPE (arg00),
13253 arg11));
13254 if (operand_equal_p (arg00, arg11, 0))
13255 return fold_build2_loc (loc, code, type, arg01,
13256 fold_convert_loc (loc, TREE_TYPE (arg01),
13257 arg10));
13258 if (operand_equal_p (arg00, arg10, 0))
13259 return fold_build2_loc (loc, code, type, arg01,
13260 fold_convert_loc (loc, TREE_TYPE (arg01),
13261 arg11));
13263 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13264 if (TREE_CODE (arg01) == INTEGER_CST
13265 && TREE_CODE (arg11) == INTEGER_CST)
13267 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13268 fold_convert_loc (loc, itype, arg11));
13269 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13270 return fold_build2_loc (loc, code, type, tem,
13271 fold_convert_loc (loc, itype, arg10));
13275 /* Attempt to simplify equality/inequality comparisons of complex
13276 values. Only lower the comparison if the result is known or
13277 can be simplified to a single scalar comparison. */
13278 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13279 || TREE_CODE (arg0) == COMPLEX_CST)
13280 && (TREE_CODE (arg1) == COMPLEX_EXPR
13281 || TREE_CODE (arg1) == COMPLEX_CST))
13283 tree real0, imag0, real1, imag1;
13284 tree rcond, icond;
13286 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13288 real0 = TREE_OPERAND (arg0, 0);
13289 imag0 = TREE_OPERAND (arg0, 1);
13291 else
13293 real0 = TREE_REALPART (arg0);
13294 imag0 = TREE_IMAGPART (arg0);
13297 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13299 real1 = TREE_OPERAND (arg1, 0);
13300 imag1 = TREE_OPERAND (arg1, 1);
13302 else
13304 real1 = TREE_REALPART (arg1);
13305 imag1 = TREE_IMAGPART (arg1);
13308 rcond = fold_binary_loc (loc, code, type, real0, real1);
13309 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13311 if (integer_zerop (rcond))
13313 if (code == EQ_EXPR)
13314 return omit_two_operands_loc (loc, type, boolean_false_node,
13315 imag0, imag1);
13316 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13318 else
13320 if (code == NE_EXPR)
13321 return omit_two_operands_loc (loc, type, boolean_true_node,
13322 imag0, imag1);
13323 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13327 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13328 if (icond && TREE_CODE (icond) == INTEGER_CST)
13330 if (integer_zerop (icond))
13332 if (code == EQ_EXPR)
13333 return omit_two_operands_loc (loc, type, boolean_false_node,
13334 real0, real1);
13335 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13337 else
13339 if (code == NE_EXPR)
13340 return omit_two_operands_loc (loc, type, boolean_true_node,
13341 real0, real1);
13342 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13347 return NULL_TREE;
13349 case LT_EXPR:
13350 case GT_EXPR:
13351 case LE_EXPR:
13352 case GE_EXPR:
13353 tem = fold_comparison (loc, code, type, op0, op1);
13354 if (tem != NULL_TREE)
13355 return tem;
13357 /* Transform comparisons of the form X +- C CMP X. */
13358 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13359 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13360 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13361 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13362 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13363 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13365 tree arg01 = TREE_OPERAND (arg0, 1);
13366 enum tree_code code0 = TREE_CODE (arg0);
13367 int is_positive;
13369 if (TREE_CODE (arg01) == REAL_CST)
13370 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13371 else
13372 is_positive = tree_int_cst_sgn (arg01);
13374 /* (X - c) > X becomes false. */
13375 if (code == GT_EXPR
13376 && ((code0 == MINUS_EXPR && is_positive >= 0)
13377 || (code0 == PLUS_EXPR && is_positive <= 0)))
13379 if (TREE_CODE (arg01) == INTEGER_CST
13380 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13381 fold_overflow_warning (("assuming signed overflow does not "
13382 "occur when assuming that (X - c) > X "
13383 "is always false"),
13384 WARN_STRICT_OVERFLOW_ALL);
13385 return constant_boolean_node (0, type);
13388 /* Likewise (X + c) < X becomes false. */
13389 if (code == LT_EXPR
13390 && ((code0 == PLUS_EXPR && is_positive >= 0)
13391 || (code0 == MINUS_EXPR && is_positive <= 0)))
13393 if (TREE_CODE (arg01) == INTEGER_CST
13394 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13395 fold_overflow_warning (("assuming signed overflow does not "
13396 "occur when assuming that "
13397 "(X + c) < X is always false"),
13398 WARN_STRICT_OVERFLOW_ALL);
13399 return constant_boolean_node (0, type);
13402 /* Convert (X - c) <= X to true. */
13403 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13404 && code == LE_EXPR
13405 && ((code0 == MINUS_EXPR && is_positive >= 0)
13406 || (code0 == PLUS_EXPR && is_positive <= 0)))
13408 if (TREE_CODE (arg01) == INTEGER_CST
13409 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13410 fold_overflow_warning (("assuming signed overflow does not "
13411 "occur when assuming that "
13412 "(X - c) <= X is always true"),
13413 WARN_STRICT_OVERFLOW_ALL);
13414 return constant_boolean_node (1, type);
13417 /* Convert (X + c) >= X to true. */
13418 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13419 && code == GE_EXPR
13420 && ((code0 == PLUS_EXPR && is_positive >= 0)
13421 || (code0 == MINUS_EXPR && is_positive <= 0)))
13423 if (TREE_CODE (arg01) == INTEGER_CST
13424 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13425 fold_overflow_warning (("assuming signed overflow does not "
13426 "occur when assuming that "
13427 "(X + c) >= X is always true"),
13428 WARN_STRICT_OVERFLOW_ALL);
13429 return constant_boolean_node (1, type);
13432 if (TREE_CODE (arg01) == INTEGER_CST)
13434 /* Convert X + c > X and X - c < X to true for integers. */
13435 if (code == GT_EXPR
13436 && ((code0 == PLUS_EXPR && is_positive > 0)
13437 || (code0 == MINUS_EXPR && is_positive < 0)))
13439 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13440 fold_overflow_warning (("assuming signed overflow does "
13441 "not occur when assuming that "
13442 "(X + c) > X is always true"),
13443 WARN_STRICT_OVERFLOW_ALL);
13444 return constant_boolean_node (1, type);
13447 if (code == LT_EXPR
13448 && ((code0 == MINUS_EXPR && is_positive > 0)
13449 || (code0 == PLUS_EXPR && is_positive < 0)))
13451 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13452 fold_overflow_warning (("assuming signed overflow does "
13453 "not occur when assuming that "
13454 "(X - c) < X is always true"),
13455 WARN_STRICT_OVERFLOW_ALL);
13456 return constant_boolean_node (1, type);
13459 /* Convert X + c <= X and X - c >= X to false for integers. */
13460 if (code == LE_EXPR
13461 && ((code0 == PLUS_EXPR && is_positive > 0)
13462 || (code0 == MINUS_EXPR && is_positive < 0)))
13464 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13465 fold_overflow_warning (("assuming signed overflow does "
13466 "not occur when assuming that "
13467 "(X + c) <= X is always false"),
13468 WARN_STRICT_OVERFLOW_ALL);
13469 return constant_boolean_node (0, type);
13472 if (code == GE_EXPR
13473 && ((code0 == MINUS_EXPR && is_positive > 0)
13474 || (code0 == PLUS_EXPR && is_positive < 0)))
13476 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13477 fold_overflow_warning (("assuming signed overflow does "
13478 "not occur when assuming that "
13479 "(X - c) >= X is always false"),
13480 WARN_STRICT_OVERFLOW_ALL);
13481 return constant_boolean_node (0, type);
13486 /* Comparisons with the highest or lowest possible integer of
13487 the specified precision will have known values. */
13489 tree arg1_type = TREE_TYPE (arg1);
13490 unsigned int width = TYPE_PRECISION (arg1_type);
13492 if (TREE_CODE (arg1) == INTEGER_CST
13493 && width <= HOST_BITS_PER_DOUBLE_INT
13494 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13496 HOST_WIDE_INT signed_max_hi;
13497 unsigned HOST_WIDE_INT signed_max_lo;
13498 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13500 if (width <= HOST_BITS_PER_WIDE_INT)
13502 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13503 - 1;
13504 signed_max_hi = 0;
13505 max_hi = 0;
13507 if (TYPE_UNSIGNED (arg1_type))
13509 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13510 min_lo = 0;
13511 min_hi = 0;
13513 else
13515 max_lo = signed_max_lo;
13516 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13517 min_hi = -1;
13520 else
13522 width -= HOST_BITS_PER_WIDE_INT;
13523 signed_max_lo = -1;
13524 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13525 - 1;
13526 max_lo = -1;
13527 min_lo = 0;
13529 if (TYPE_UNSIGNED (arg1_type))
13531 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13532 min_hi = 0;
13534 else
13536 max_hi = signed_max_hi;
13537 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13541 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13542 && TREE_INT_CST_LOW (arg1) == max_lo)
13543 switch (code)
13545 case GT_EXPR:
13546 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13548 case GE_EXPR:
13549 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13551 case LE_EXPR:
13552 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13554 case LT_EXPR:
13555 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13557 /* The GE_EXPR and LT_EXPR cases above are not normally
13558 reached because of previous transformations. */
13560 default:
13561 break;
13563 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13564 == max_hi
13565 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13566 switch (code)
13568 case GT_EXPR:
13569 arg1 = const_binop (PLUS_EXPR, arg1,
13570 build_int_cst (TREE_TYPE (arg1), 1));
13571 return fold_build2_loc (loc, EQ_EXPR, type,
13572 fold_convert_loc (loc,
13573 TREE_TYPE (arg1), arg0),
13574 arg1);
13575 case LE_EXPR:
13576 arg1 = const_binop (PLUS_EXPR, arg1,
13577 build_int_cst (TREE_TYPE (arg1), 1));
13578 return fold_build2_loc (loc, NE_EXPR, type,
13579 fold_convert_loc (loc, TREE_TYPE (arg1),
13580 arg0),
13581 arg1);
13582 default:
13583 break;
13585 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13586 == min_hi
13587 && TREE_INT_CST_LOW (arg1) == min_lo)
13588 switch (code)
13590 case LT_EXPR:
13591 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13593 case LE_EXPR:
13594 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13596 case GE_EXPR:
13597 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13599 case GT_EXPR:
13600 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13602 default:
13603 break;
13605 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13606 == min_hi
13607 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13608 switch (code)
13610 case GE_EXPR:
13611 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13612 return fold_build2_loc (loc, NE_EXPR, type,
13613 fold_convert_loc (loc,
13614 TREE_TYPE (arg1), arg0),
13615 arg1);
13616 case LT_EXPR:
13617 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13618 return fold_build2_loc (loc, EQ_EXPR, type,
13619 fold_convert_loc (loc, TREE_TYPE (arg1),
13620 arg0),
13621 arg1);
13622 default:
13623 break;
13626 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13627 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13628 && TYPE_UNSIGNED (arg1_type)
13629 /* We will flip the signedness of the comparison operator
13630 associated with the mode of arg1, so the sign bit is
13631 specified by this mode. Check that arg1 is the signed
13632 max associated with this sign bit. */
13633 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13634 /* signed_type does not work on pointer types. */
13635 && INTEGRAL_TYPE_P (arg1_type))
13637 /* The following case also applies to X < signed_max+1
13638 and X >= signed_max+1 because of previous transformations. */
13639 if (code == LE_EXPR || code == GT_EXPR)
13641 tree st;
13642 st = signed_type_for (TREE_TYPE (arg1));
13643 return fold_build2_loc (loc,
13644 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13645 type, fold_convert_loc (loc, st, arg0),
13646 build_int_cst (st, 0));
13652 /* If we are comparing an ABS_EXPR with a constant, we can
13653 convert all the cases into explicit comparisons, but they may
13654 well not be faster than doing the ABS and one comparison.
13655 But ABS (X) <= C is a range comparison, which becomes a subtraction
13656 and a comparison, and is probably faster. */
13657 if (code == LE_EXPR
13658 && TREE_CODE (arg1) == INTEGER_CST
13659 && TREE_CODE (arg0) == ABS_EXPR
13660 && ! TREE_SIDE_EFFECTS (arg0)
13661 && (0 != (tem = negate_expr (arg1)))
13662 && TREE_CODE (tem) == INTEGER_CST
13663 && !TREE_OVERFLOW (tem))
13664 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13665 build2 (GE_EXPR, type,
13666 TREE_OPERAND (arg0, 0), tem),
13667 build2 (LE_EXPR, type,
13668 TREE_OPERAND (arg0, 0), arg1));
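/* Editorial example (not part of the original source): with C == 5,

       abs (x) <= 5   ==>   x >= -5 && x <= 5

   the TRUTH_ANDIF_EXPR built here; later passes can reduce that
   range test to one subtraction and one unsigned comparison.  */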
13670 /* Convert ABS_EXPR<x> >= 0 to true. */
13671 strict_overflow_p = false;
13672 if (code == GE_EXPR
13673 && (integer_zerop (arg1)
13674 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13675 && real_zerop (arg1)))
13676 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13678 if (strict_overflow_p)
13679 fold_overflow_warning (("assuming signed overflow does not occur "
13680 "when simplifying comparison of "
13681 "absolute value and zero"),
13682 WARN_STRICT_OVERFLOW_CONDITIONAL);
13683 return omit_one_operand_loc (loc, type,
13684 constant_boolean_node (true, type),
13685 arg0);
13688 /* Convert ABS_EXPR<x> < 0 to false. */
13689 strict_overflow_p = false;
13690 if (code == LT_EXPR
13691 && (integer_zerop (arg1) || real_zerop (arg1))
13692 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13694 if (strict_overflow_p)
13695 fold_overflow_warning (("assuming signed overflow does not occur "
13696 "when simplifying comparison of "
13697 "absolute value and zero"),
13698 WARN_STRICT_OVERFLOW_CONDITIONAL);
13699 return omit_one_operand_loc (loc, type,
13700 constant_boolean_node (false, type),
13701 arg0);
13704 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13705 and similarly for >= into !=. */
13706 if ((code == LT_EXPR || code == GE_EXPR)
13707 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13708 && TREE_CODE (arg1) == LSHIFT_EXPR
13709 && integer_onep (TREE_OPERAND (arg1, 0)))
13710 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13711 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13712 TREE_OPERAND (arg1, 1)),
13713 build_zero_cst (TREE_TYPE (arg0)));
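/* Editorial example (not part of the original source): for
   unsigned x,

       x < (1 << y)    ==>   (x >> y) == 0
       x >= (1 << y)   ==>   (x >> y) != 0

   x is below 1 << y exactly when no bit at position y or higher
   is set.  */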
13715 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13716 otherwise Y might be >= # of bits in X's type and thus e.g.
13717 (unsigned char) (1 << Y) for Y == 15 might be 0.
13718 If the cast is widening, then 1 << Y should have unsigned type,
13719 otherwise if Y is the number of bits in the signed shift type minus 1,
13720 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13721 Y == 31 might be 0xffffffff80000000. */
13722 if ((code == LT_EXPR || code == GE_EXPR)
13723 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13724 && CONVERT_EXPR_P (arg1)
13725 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13726 && (TYPE_PRECISION (TREE_TYPE (arg1))
13727 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13728 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13729 || (TYPE_PRECISION (TREE_TYPE (arg1))
13730 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13731 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13733 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13734 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13735 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13736 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13737 build_zero_cst (TREE_TYPE (arg0)));
13740 return NULL_TREE;
13742 case UNORDERED_EXPR:
13743 case ORDERED_EXPR:
13744 case UNLT_EXPR:
13745 case UNLE_EXPR:
13746 case UNGT_EXPR:
13747 case UNGE_EXPR:
13748 case UNEQ_EXPR:
13749 case LTGT_EXPR:
13750 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13752 t1 = fold_relational_const (code, type, arg0, arg1);
13753 if (t1 != NULL_TREE)
13754 return t1;
13757 /* If the first operand is NaN, the result is constant. */
13758 if (TREE_CODE (arg0) == REAL_CST
13759 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13760 && (code != LTGT_EXPR || ! flag_trapping_math))
13762 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13763 ? integer_zero_node
13764 : integer_one_node;
13765 return omit_one_operand_loc (loc, type, t1, arg1);
13768 /* If the second operand is NaN, the result is constant. */
13769 if (TREE_CODE (arg1) == REAL_CST
13770 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13771 && (code != LTGT_EXPR || ! flag_trapping_math))
13773 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13774 ? integer_zero_node
13775 : integer_one_node;
13776 return omit_one_operand_loc (loc, type, t1, arg0);
13779 /* Simplify unordered comparison of something with itself. */
13780 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13781 && operand_equal_p (arg0, arg1, 0))
13782 return constant_boolean_node (1, type);
13784 if (code == LTGT_EXPR
13785 && !flag_trapping_math
13786 && operand_equal_p (arg0, arg1, 0))
13787 return constant_boolean_node (0, type);
13789 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13791 tree targ0 = strip_float_extensions (arg0);
13792 tree targ1 = strip_float_extensions (arg1);
13793 tree newtype = TREE_TYPE (targ0);
13795 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13796 newtype = TREE_TYPE (targ1);
13798 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13799 return fold_build2_loc (loc, code, type,
13800 fold_convert_loc (loc, newtype, targ0),
13801 fold_convert_loc (loc, newtype, targ1));
13804 return NULL_TREE;
13806 case COMPOUND_EXPR:
13807 /* When pedantic, a compound expression can be neither an lvalue
13808 nor an integer constant expression. */
13809 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13810 return NULL_TREE;
13811 /* Don't let (0, 0) be a null pointer constant. */
13812 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13813 : fold_convert_loc (loc, type, arg1);
13814 return pedantic_non_lvalue_loc (loc, tem);
13816 case COMPLEX_EXPR:
13817 if ((TREE_CODE (arg0) == REAL_CST
13818 && TREE_CODE (arg1) == REAL_CST)
13819 || (TREE_CODE (arg0) == INTEGER_CST
13820 && TREE_CODE (arg1) == INTEGER_CST))
13821 return build_complex (type, arg0, arg1);
13822 if (TREE_CODE (arg0) == REALPART_EXPR
13823 && TREE_CODE (arg1) == IMAGPART_EXPR
13824 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13825 && operand_equal_p (TREE_OPERAND (arg0, 0),
13826 TREE_OPERAND (arg1, 0), 0))
13827 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13828 TREE_OPERAND (arg1, 0));
13829 return NULL_TREE;
13831 case ASSERT_EXPR:
13832 /* An ASSERT_EXPR should never be passed to fold_binary. */
13833 gcc_unreachable ();
13835 case VEC_PACK_TRUNC_EXPR:
13836 case VEC_PACK_FIX_TRUNC_EXPR:
13838 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13839 tree *elts;
13841 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13842 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
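	/* E.g. packing two V2DI constants into a V4SI result truncates
	   each 64-bit element to 32 bits; VEC_PACK_FIX_TRUNC_EXPR instead
	   converts floating-point elements to integers.  */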
13843 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13844 return NULL_TREE;
13846 elts = XALLOCAVEC (tree, nelts);
13847 if (!vec_cst_ctor_to_array (arg0, elts)
13848 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13849 return NULL_TREE;
13851 for (i = 0; i < nelts; i++)
13853 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13854 ? NOP_EXPR : FIX_TRUNC_EXPR,
13855 TREE_TYPE (type), elts[i]);
13856 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13857 return NULL_TREE;
13860 return build_vector (type, elts);
13863 case VEC_WIDEN_MULT_LO_EXPR:
13864 case VEC_WIDEN_MULT_HI_EXPR:
13865 case VEC_WIDEN_MULT_EVEN_EXPR:
13866 case VEC_WIDEN_MULT_ODD_EXPR:
13868 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13869 unsigned int out, ofs, scale;
13870 tree *elts;
13872 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13873 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13874 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13875 return NULL_TREE;
13877 elts = XALLOCAVEC (tree, nelts * 4);
13878 if (!vec_cst_ctor_to_array (arg0, elts)
13879 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13880 return NULL_TREE;
13882 if (code == VEC_WIDEN_MULT_LO_EXPR)
13883 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13884 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13885 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13886 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13887 scale = 1, ofs = 0;
13888 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13889 scale = 1, ofs = 1;
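	/* E.g. for a V4SI result from two V8HI operands (nelts == 4,
	   little-endian): LO reads input elements 0..3, HI reads 4..7,
	   EVEN reads 0,2,4,6 and ODD reads 1,3,5,7 of each operand.  */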
13891 for (out = 0; out < nelts; out++)
13893 unsigned int in1 = (out << scale) + ofs;
13894 unsigned int in2 = in1 + nelts * 2;
13895 tree t1, t2;
13897 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13898 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13900 if (t1 == NULL_TREE || t2 == NULL_TREE)
13901 return NULL_TREE;
13902 elts[out] = const_binop (MULT_EXPR, t1, t2);
13903 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13904 return NULL_TREE;
13907 return build_vector (type, elts);
13910 default:
13911 return NULL_TREE;
13912 } /* switch (code) */
13915 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13916 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13917 of GOTO_EXPR. */
13919 static tree
13920 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13922 switch (TREE_CODE (*tp))
13924 case LABEL_EXPR:
13925 return *tp;
13927 case GOTO_EXPR:
13928 *walk_subtrees = 0;
13930 /* ... fall through ... */
13932 default:
13933 return NULL_TREE;
13937 /* Return whether the sub-tree ST contains a label which is accessible from
13938 outside the sub-tree. */
13940 static bool
13941 contains_label_p (tree st)
13943 return
13944 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13947 /* Fold a ternary expression of code CODE and type TYPE with operands
13948 OP0, OP1, and OP2. Return the folded expression if folding is
13949 successful. Otherwise, return NULL_TREE. */
13951 tree
13952 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13953 tree op0, tree op1, tree op2)
13955 tree tem;
13956 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13957 enum tree_code_class kind = TREE_CODE_CLASS (code);
13959 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13960 && TREE_CODE_LENGTH (code) == 3);
13962 /* Strip any conversions that don't change the mode. This is safe
13963 for every expression, except for a comparison expression because
13964 its signedness is derived from its operands. So, in the latter
13965 case, only strip conversions that don't change the signedness.
13967 Note that this is done as an internal manipulation within the
13968 constant folder, in order to find the simplest representation of
13969 the arguments so that their form can be studied. In any case,
13970 the appropriate type conversions should be put back in the tree
13971 that will get out of the constant folder. */
13972 if (op0)
13974 arg0 = op0;
13975 STRIP_NOPS (arg0);
13978 if (op1)
13980 arg1 = op1;
13981 STRIP_NOPS (arg1);
13984 if (op2)
13986 arg2 = op2;
13987 STRIP_NOPS (arg2);
13990 switch (code)
13992 case COMPONENT_REF:
13993 if (TREE_CODE (arg0) == CONSTRUCTOR
13994 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13996 unsigned HOST_WIDE_INT idx;
13997 tree field, value;
13998 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13999 if (field == arg1)
14000 return value;
14002 return NULL_TREE;
14004 case COND_EXPR:
14005 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14006 so all simple results must be passed through pedantic_non_lvalue. */
14007 if (TREE_CODE (arg0) == INTEGER_CST)
14009 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14010 tem = integer_zerop (arg0) ? op2 : op1;
14011 /* Only optimize constant conditions when the selected branch
14012 has the same type as the COND_EXPR. This avoids optimizing
14013 away "c ? x : throw", where the throw has a void type.
14014 Also avoid discarding the unused operand when it contains a label. */
14015 if ((!TREE_SIDE_EFFECTS (unused_op)
14016 || !contains_label_p (unused_op))
14017 && (! VOID_TYPE_P (TREE_TYPE (tem))
14018 || VOID_TYPE_P (type)))
14019 return pedantic_non_lvalue_loc (loc, tem);
14020 return NULL_TREE;
14022 if (operand_equal_p (arg1, op2, 0))
14023 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14025 /* If we have A op B ? A : C, we may be able to convert this to a
14026 simpler expression, depending on the operation and the values
14027 of B and C. Signed zeros prevent all of these transformations,
14028 for reasons given above each one.
14030 Also try swapping the arguments and inverting the conditional. */
14031 if (COMPARISON_CLASS_P (arg0)
14032 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14033 arg1, TREE_OPERAND (arg0, 1))
14034 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14036 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14037 if (tem)
14038 return tem;
14041 if (COMPARISON_CLASS_P (arg0)
14042 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14043 op2,
14044 TREE_OPERAND (arg0, 1))
14045 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14047 location_t loc0 = expr_location_or (arg0, loc);
14048 tem = fold_truth_not_expr (loc0, arg0);
14049 if (tem && COMPARISON_CLASS_P (tem))
14051 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14052 if (tem)
14053 return tem;
14057 /* If the second operand is simpler than the third, swap them
14058 since that produces better jump optimization results. */
14059 if (truth_value_p (TREE_CODE (arg0))
14060 && tree_swap_operands_p (op1, op2, false))
14062 location_t loc0 = expr_location_or (arg0, loc);
14063 /* See if this can be inverted. If it can't, possibly because
14064 it was a floating-point inequality comparison, don't do
14065 anything. */
14066 tem = fold_truth_not_expr (loc0, arg0);
14067 if (tem)
14068 return fold_build3_loc (loc, code, type, tem, op2, op1);
14071 /* Convert A ? 1 : 0 to simply A. */
14072 if (integer_onep (op1)
14073 && integer_zerop (op2)
14074 /* If we try to convert OP0 to our type, the
14075 call to fold will try to move the conversion inside
14076 a COND, which will recurse. In that case, the COND_EXPR
14077 is probably the best choice, so leave it alone. */
14078 && type == TREE_TYPE (arg0))
14079 return pedantic_non_lvalue_loc (loc, arg0);
14081 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14082 over COND_EXPR in cases such as floating point comparisons. */
14083 if (integer_zerop (op1)
14084 && integer_onep (op2)
14085 && truth_value_p (TREE_CODE (arg0)))
14086 return pedantic_non_lvalue_loc (loc,
14087 fold_convert_loc (loc, type,
14088 invert_truthvalue_loc (loc,
14089 arg0)));
14091 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14092 if (TREE_CODE (arg0) == LT_EXPR
14093 && integer_zerop (TREE_OPERAND (arg0, 1))
14094 && integer_zerop (op2)
14095 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14097 /* sign_bit_p looks through both zero and sign extensions,
14098 but for this optimization only sign extensions are
14099 usable. */
14100 tree tem2 = TREE_OPERAND (arg0, 0);
14101 while (tem != tem2)
14103 if (TREE_CODE (tem2) != NOP_EXPR
14104 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14106 tem = NULL_TREE;
14107 break;
14109 tem2 = TREE_OPERAND (tem2, 0);
14111 /* sign_bit_p only checks ARG1 bits within A's precision.
14112 If <sign bit of A> has wider type than A, bits outside
14113 of A's precision in <sign bit of A> need to be checked.
14114 If they are all 0, this optimization needs to be done
14115 in unsigned A's type; if they are all 1, in signed A's type;
14116 otherwise this can't be done. */
14117 if (tem
14118 && TYPE_PRECISION (TREE_TYPE (tem))
14119 < TYPE_PRECISION (TREE_TYPE (arg1))
14120 && TYPE_PRECISION (TREE_TYPE (tem))
14121 < TYPE_PRECISION (type))
14123 unsigned HOST_WIDE_INT mask_lo;
14124 HOST_WIDE_INT mask_hi;
14125 int inner_width, outer_width;
14126 tree tem_type;
14128 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14129 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14130 if (outer_width > TYPE_PRECISION (type))
14131 outer_width = TYPE_PRECISION (type);
14133 if (outer_width > HOST_BITS_PER_WIDE_INT)
14135 mask_hi = ((unsigned HOST_WIDE_INT) -1
14136 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14137 mask_lo = -1;
14139 else
14141 mask_hi = 0;
14142 mask_lo = ((unsigned HOST_WIDE_INT) -1
14143 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14145 if (inner_width > HOST_BITS_PER_WIDE_INT)
14147 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14148 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14149 mask_lo = 0;
14151 else
14152 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14153 >> (HOST_BITS_PER_WIDE_INT - inner_width));
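		  /* E.g. on a host with a 64-bit HOST_WIDE_INT, for a
		     32-bit A and a 64-bit ARG1 the mask now covers bits
		     32..63, i.e. exactly the bits that sign-extending A
		     would set in ARG1.  */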
14155 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14156 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14158 tem_type = signed_type_for (TREE_TYPE (tem));
14159 tem = fold_convert_loc (loc, tem_type, tem);
14161 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14162 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14164 tem_type = unsigned_type_for (TREE_TYPE (tem));
14165 tem = fold_convert_loc (loc, tem_type, tem);
14167 else
14168 tem = NULL;
14171 if (tem)
14172 return
14173 fold_convert_loc (loc, type,
14174 fold_build2_loc (loc, BIT_AND_EXPR,
14175 TREE_TYPE (tem), tem,
14176 fold_convert_loc (loc,
14177 TREE_TYPE (tem),
14178 arg1)));
14181 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14182 already handled above. */
14183 if (TREE_CODE (arg0) == BIT_AND_EXPR
14184 && integer_onep (TREE_OPERAND (arg0, 1))
14185 && integer_zerop (op2)
14186 && integer_pow2p (arg1))
14188 tree tem = TREE_OPERAND (arg0, 0);
14189 STRIP_NOPS (tem);
14190 if (TREE_CODE (tem) == RSHIFT_EXPR
14191 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14192 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14193 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14194 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14195 TREE_OPERAND (tem, 0), arg1);
14198 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14199 is probably obsolete because the first operand should be a
14200 truth value (that's why we have the two cases above), but let's
14201 leave it in until we can confirm this for all front-ends. */
14202 if (integer_zerop (op2)
14203 && TREE_CODE (arg0) == NE_EXPR
14204 && integer_zerop (TREE_OPERAND (arg0, 1))
14205 && integer_pow2p (arg1)
14206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14207 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14208 arg1, OEP_ONLY_CONST))
14209 return pedantic_non_lvalue_loc (loc,
14210 fold_convert_loc (loc, type,
14211 TREE_OPERAND (arg0, 0)));
14213 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14214 if (integer_zerop (op2)
14215 && truth_value_p (TREE_CODE (arg0))
14216 && truth_value_p (TREE_CODE (arg1)))
14217 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14218 fold_convert_loc (loc, type, arg0),
14219 arg1);
14221 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14222 if (integer_onep (op2)
14223 && truth_value_p (TREE_CODE (arg0))
14224 && truth_value_p (TREE_CODE (arg1)))
14226 location_t loc0 = expr_location_or (arg0, loc);
14227 /* Only perform transformation if ARG0 is easily inverted. */
14228 tem = fold_truth_not_expr (loc0, arg0);
14229 if (tem)
14230 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14231 fold_convert_loc (loc, type, tem),
14232 arg1);
14235 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14236 if (integer_zerop (arg1)
14237 && truth_value_p (TREE_CODE (arg0))
14238 && truth_value_p (TREE_CODE (op2)))
14240 location_t loc0 = expr_location_or (arg0, loc);
14241 /* Only perform transformation if ARG0 is easily inverted. */
14242 tem = fold_truth_not_expr (loc0, arg0);
14243 if (tem)
14244 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14245 fold_convert_loc (loc, type, tem),
14246 op2);
14249 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14250 if (integer_onep (arg1)
14251 && truth_value_p (TREE_CODE (arg0))
14252 && truth_value_p (TREE_CODE (op2)))
14253 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14254 fold_convert_loc (loc, type, arg0),
14255 op2);
14257 return NULL_TREE;
14259 case VEC_COND_EXPR:
14260 if (TREE_CODE (arg0) == VECTOR_CST)
14262 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14263 return pedantic_non_lvalue_loc (loc, op1);
14264 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14265 return pedantic_non_lvalue_loc (loc, op2);
14267 return NULL_TREE;
14269 case CALL_EXPR:
14270 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14271 of fold_ternary on them. */
14272 gcc_unreachable ();
14274 case BIT_FIELD_REF:
14275 if ((TREE_CODE (arg0) == VECTOR_CST
14276 || (TREE_CODE (arg0) == CONSTRUCTOR
14277 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14278 && (type == TREE_TYPE (TREE_TYPE (arg0))
14279 || (TREE_CODE (type) == VECTOR_TYPE
14280 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14282 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14283 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14284 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14285 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
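	  /* An illustration: BIT_FIELD_REF <v4si-cst, 64, 64> has
	     width == 32, n == 64 and idx == 64, and extracts elements
	     2 and 3 of the V4SI constant as a V2SI value.  */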
14287 if (n != 0
14288 && (idx % width) == 0
14289 && (n % width) == 0
14290 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14292 idx = idx / width;
14293 n = n / width;
14295 if (TREE_CODE (arg0) == VECTOR_CST)
14297 if (n == 1)
14298 return VECTOR_CST_ELT (arg0, idx);
14300 tree *vals = XALLOCAVEC (tree, n);
14301 for (unsigned i = 0; i < n; ++i)
14302 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14303 return build_vector (type, vals);
14306 /* Constructor elements can be subvectors. */
14307 unsigned HOST_WIDE_INT k = 1;
14308 if (CONSTRUCTOR_NELTS (arg0) != 0)
14310 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14311 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14312 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14315 /* We keep an exact subset of the constructor elements. */
14316 if ((idx % k) == 0 && (n % k) == 0)
14318 if (CONSTRUCTOR_NELTS (arg0) == 0)
14319 return build_constructor (type, NULL);
14320 idx /= k;
14321 n /= k;
14322 if (n == 1)
14324 if (idx < CONSTRUCTOR_NELTS (arg0))
14325 return CONSTRUCTOR_ELT (arg0, idx)->value;
14326 return build_zero_cst (type);
14329 vec<constructor_elt, va_gc> *vals;
14330 vec_alloc (vals, n);
14331 for (unsigned i = 0;
14332 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14333 ++i)
14334 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14335 CONSTRUCTOR_ELT
14336 (arg0, idx + i)->value);
14337 return build_constructor (type, vals);
14339 /* The bitfield references a single constructor element. */
14340 else if (idx + n <= (idx / k + 1) * k)
14342 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14343 return build_zero_cst (type);
14344 else if (n == k)
14345 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14346 else
14347 return fold_build3_loc (loc, code, type,
14348 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14349 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14354 /* A bit-field-ref that referenced the full argument can be stripped. */
14355 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14356 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14357 && integer_zerop (op2))
14358 return fold_convert_loc (loc, type, arg0);
14360 /* On constants we can use native encode/interpret to constant
14361 fold (nearly) all BIT_FIELD_REFs. */
14362 if (CONSTANT_CLASS_P (arg0)
14363 && can_native_interpret_type_p (type)
14364 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14365 /* This limitation should not be necessary, we just need to
14366 round this up to mode size. */
14367 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14368 /* Need bit-shifting of the buffer to relax the following. */
14369 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14371 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14372 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14373 unsigned HOST_WIDE_INT clen;
14374 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14375 /* ??? We cannot tell native_encode_expr to start at
14376 some random byte only. So limit us to a reasonable amount
14377 of work. */
14378 if (clen <= 4096)
14380 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14381 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14382 if (len > 0
14383 && len * BITS_PER_UNIT >= bitpos + bitsize)
14385 tree v = native_interpret_expr (type,
14386 b + bitpos / BITS_PER_UNIT,
14387 bitsize / BITS_PER_UNIT);
14388 if (v)
14389 return v;
14394 return NULL_TREE;
14396 case FMA_EXPR:
14397 /* For integers we can decompose the FMA if possible. */
14398 if (TREE_CODE (arg0) == INTEGER_CST
14399 && TREE_CODE (arg1) == INTEGER_CST)
14400 return fold_build2_loc (loc, PLUS_EXPR, type,
14401 const_binop (MULT_EXPR, arg0, arg1), arg2);
14402 if (integer_zerop (arg2))
14403 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14405 return fold_fma (loc, type, arg0, arg1, arg2);
14407 case VEC_PERM_EXPR:
14408 if (TREE_CODE (arg2) == VECTOR_CST)
14410 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14411 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14412 tree t;
14413 bool need_mask_canon = false;
14414 bool all_in_vec0 = true;
14415 bool all_in_vec1 = true;
14416 bool maybe_identity = true;
14417 bool single_arg = (op0 == op1);
14418 bool changed = false;
14420 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14421 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14422 for (i = 0; i < nelts; i++)
14424 tree val = VECTOR_CST_ELT (arg2, i);
14425 if (TREE_CODE (val) != INTEGER_CST)
14426 return NULL_TREE;
14428 sel[i] = TREE_INT_CST_LOW (val) & mask;
14429 if (TREE_INT_CST_HIGH (val)
14430 || ((unsigned HOST_WIDE_INT)
14431 TREE_INT_CST_LOW (val) != sel[i]))
14432 need_mask_canon = true;
14434 if (sel[i] < nelts)
14435 all_in_vec1 = false;
14436 else
14437 all_in_vec0 = false;
14439 if ((sel[i] & (nelts-1)) != i)
14440 maybe_identity = false;
14443 if (maybe_identity)
14445 if (all_in_vec0)
14446 return op0;
14447 if (all_in_vec1)
14448 return op1;
14451 if (all_in_vec0)
14452 op1 = op0;
14453 else if (all_in_vec1)
14455 op0 = op1;
14456 for (i = 0; i < nelts; i++)
14457 sel[i] -= nelts;
14458 need_mask_canon = true;
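	  /* E.g. with nelts == 4 and selector {5,4,7,6}, everything is
	     selected from op1, so op0 is replaced by op1 and the
	     selector is canonicalized to {1,0,3,2}.  */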
14461 if ((TREE_CODE (op0) == VECTOR_CST
14462 || TREE_CODE (op0) == CONSTRUCTOR)
14463 && (TREE_CODE (op1) == VECTOR_CST
14464 || TREE_CODE (op1) == CONSTRUCTOR))
14466 t = fold_vec_perm (type, op0, op1, sel);
14467 if (t != NULL_TREE)
14468 return t;
14471 if (op0 == op1 && !single_arg)
14472 changed = true;
14474 if (need_mask_canon && arg2 == op2)
14476 tree *tsel = XALLOCAVEC (tree, nelts);
14477 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14478 for (i = 0; i < nelts; i++)
14479 tsel[i] = build_int_cst (eltype, sel[i]);
14480 op2 = build_vector (TREE_TYPE (arg2), tsel);
14481 changed = true;
14484 if (changed)
14485 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14487 return NULL_TREE;
14489 default:
14490 return NULL_TREE;
14491 } /* switch (code) */
14494 /* Perform constant folding and related simplification of EXPR.
14495 The related simplifications include x*1 => x, x*0 => 0, etc.,
14496 and application of the associative law.
14497 NOP_EXPR conversions may be removed freely (as long as we
14498 are careful not to change the type of the overall expression).
14499 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14500 but we can constant-fold them if they have constant operands. */
14502 #ifdef ENABLE_FOLD_CHECKING
14503 # define fold(x) fold_1 (x)
14504 static tree fold_1 (tree);
14505 static
14506 #endif
14507 tree
14508 fold (tree expr)
14510 const tree t = expr;
14511 enum tree_code code = TREE_CODE (t);
14512 enum tree_code_class kind = TREE_CODE_CLASS (code);
14513 tree tem;
14514 location_t loc = EXPR_LOCATION (expr);
14516 /* Return right away if a constant. */
14517 if (kind == tcc_constant)
14518 return t;
14520 /* CALL_EXPR-like objects with variable numbers of operands are
14521 treated specially. */
14522 if (kind == tcc_vl_exp)
14524 if (code == CALL_EXPR)
14526 tem = fold_call_expr (loc, expr, false);
14527 return tem ? tem : expr;
14529 return expr;
14532 if (IS_EXPR_CODE_CLASS (kind))
14534 tree type = TREE_TYPE (t);
14535 tree op0, op1, op2;
14537 switch (TREE_CODE_LENGTH (code))
14539 case 1:
14540 op0 = TREE_OPERAND (t, 0);
14541 tem = fold_unary_loc (loc, code, type, op0);
14542 return tem ? tem : expr;
14543 case 2:
14544 op0 = TREE_OPERAND (t, 0);
14545 op1 = TREE_OPERAND (t, 1);
14546 tem = fold_binary_loc (loc, code, type, op0, op1);
14547 return tem ? tem : expr;
14548 case 3:
14549 op0 = TREE_OPERAND (t, 0);
14550 op1 = TREE_OPERAND (t, 1);
14551 op2 = TREE_OPERAND (t, 2);
14552 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14553 return tem ? tem : expr;
14554 default:
14555 break;
14559 switch (code)
14561 case ARRAY_REF:
14563 tree op0 = TREE_OPERAND (t, 0);
14564 tree op1 = TREE_OPERAND (t, 1);
14566 if (TREE_CODE (op1) == INTEGER_CST
14567 && TREE_CODE (op0) == CONSTRUCTOR
14568 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14570 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14571 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14572 unsigned HOST_WIDE_INT begin = 0;
14574 /* Find a matching index by means of a binary search. */
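	    /* The search relies on the constructor elements being
	       sorted by index, with a RANGE_EXPR index covering a
	       contiguous span of positions.  */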
14575 while (begin != end)
14577 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14578 tree index = (*elts)[middle].index;
14580 if (TREE_CODE (index) == INTEGER_CST
14581 && tree_int_cst_lt (index, op1))
14582 begin = middle + 1;
14583 else if (TREE_CODE (index) == INTEGER_CST
14584 && tree_int_cst_lt (op1, index))
14585 end = middle;
14586 else if (TREE_CODE (index) == RANGE_EXPR
14587 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14588 begin = middle + 1;
14589 else if (TREE_CODE (index) == RANGE_EXPR
14590 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14591 end = middle;
14592 else
14593 return (*elts)[middle].value;
14597 return t;
14600 /* Return a VECTOR_CST if possible. */
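    /* E.g. a V4SI CONSTRUCTOR { 1, 2 } with all-constant elements
       becomes the VECTOR_CST { 1, 2, 0, 0 }; elements omitted from
       the constructor are implicitly zero.  */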
14601 case CONSTRUCTOR:
14603 tree type = TREE_TYPE (t);
14604 if (TREE_CODE (type) != VECTOR_TYPE)
14605 return t;
14607 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14608 unsigned HOST_WIDE_INT idx, pos = 0;
14609 tree value;
14611 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14613 if (!CONSTANT_CLASS_P (value))
14614 return t;
14615 if (TREE_CODE (value) == VECTOR_CST)
14617 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14618 vec[pos++] = VECTOR_CST_ELT (value, i);
14620 else
14621 vec[pos++] = value;
14623 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14624 vec[pos] = build_zero_cst (TREE_TYPE (type));
14626 return build_vector (type, vec);
14629 case CONST_DECL:
14630 return fold (DECL_INITIAL (t));
14632 default:
14633 return t;
14634 } /* switch (code) */
14637 #ifdef ENABLE_FOLD_CHECKING
14638 #undef fold
14640 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14641 hash_table <pointer_hash <tree_node> >);
14642 static void fold_check_failed (const_tree, const_tree);
14643 void print_fold_checksum (const_tree);
14645 /* When --enable-checking=fold, compute a digest of expr before
14646 and after the actual fold call to verify that fold did not
14647 accidentally change the original expr. */
14649 tree
14650 fold (tree expr)
14652 tree ret;
14653 struct md5_ctx ctx;
14654 unsigned char checksum_before[16], checksum_after[16];
14655 hash_table <pointer_hash <tree_node> > ht;
14657 ht.create (32);
14658 md5_init_ctx (&ctx);
14659 fold_checksum_tree (expr, &ctx, ht);
14660 md5_finish_ctx (&ctx, checksum_before);
14661 ht.empty ();
14663 ret = fold_1 (expr);
14665 md5_init_ctx (&ctx);
14666 fold_checksum_tree (expr, &ctx, ht);
14667 md5_finish_ctx (&ctx, checksum_after);
14668 ht.dispose ();
14670 if (memcmp (checksum_before, checksum_after, 16))
14671 fold_check_failed (expr, ret);
14673 return ret;
14676 void
14677 print_fold_checksum (const_tree expr)
14679 struct md5_ctx ctx;
14680 unsigned char checksum[16], cnt;
14681 hash_table <pointer_hash <tree_node> > ht;
14683 ht.create (32);
14684 md5_init_ctx (&ctx);
14685 fold_checksum_tree (expr, &ctx, ht);
14686 md5_finish_ctx (&ctx, checksum);
14687 ht.dispose ();
14688 for (cnt = 0; cnt < 16; ++cnt)
14689 fprintf (stderr, "%02x", checksum[cnt]);
14690 putc ('\n', stderr);
14693 static void
14694 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14696 internal_error ("fold check: original tree changed by fold");
14699 static void
14700 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14701 hash_table <pointer_hash <tree_node> > ht)
14703 tree_node **slot;
14704 enum tree_code code;
14705 union tree_node buf;
14706 int i, len;
14708 recursive_label:
14709 if (expr == NULL)
14710 return;
14711 slot = ht.find_slot (expr, INSERT);
14712 if (*slot != NULL)
14713 return;
14714 *slot = CONST_CAST_TREE (expr);
14715 code = TREE_CODE (expr);
14716 if (TREE_CODE_CLASS (code) == tcc_declaration
14717 && DECL_ASSEMBLER_NAME_SET_P (expr))
14719 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14720 memcpy ((char *) &buf, expr, tree_size (expr));
14721 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14722 expr = (tree) &buf;
14724 else if (TREE_CODE_CLASS (code) == tcc_type
14725 && (TYPE_POINTER_TO (expr)
14726 || TYPE_REFERENCE_TO (expr)
14727 || TYPE_CACHED_VALUES_P (expr)
14728 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14729 || TYPE_NEXT_VARIANT (expr)))
14731 /* Allow these fields to be modified. */
14732 tree tmp;
14733 memcpy ((char *) &buf, expr, tree_size (expr));
14734 expr = tmp = (tree) &buf;
14735 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14736 TYPE_POINTER_TO (tmp) = NULL;
14737 TYPE_REFERENCE_TO (tmp) = NULL;
14738 TYPE_NEXT_VARIANT (tmp) = NULL;
14739 if (TYPE_CACHED_VALUES_P (tmp))
14741 TYPE_CACHED_VALUES_P (tmp) = 0;
14742 TYPE_CACHED_VALUES (tmp) = NULL;
14745 md5_process_bytes (expr, tree_size (expr), ctx);
14746 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14747 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14748 if (TREE_CODE_CLASS (code) != tcc_type
14749 && TREE_CODE_CLASS (code) != tcc_declaration
14750 && code != TREE_LIST
14751 && code != SSA_NAME
14752 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14753 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14754 switch (TREE_CODE_CLASS (code))
14756 case tcc_constant:
14757 switch (code)
14759 case STRING_CST:
14760 md5_process_bytes (TREE_STRING_POINTER (expr),
14761 TREE_STRING_LENGTH (expr), ctx);
14762 break;
14763 case COMPLEX_CST:
14764 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14765 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14766 break;
14767 case VECTOR_CST:
14768 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14769 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14770 break;
14771 default:
14772 break;
14774 break;
14775 case tcc_exceptional:
14776 switch (code)
14778 case TREE_LIST:
14779 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14780 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14781 expr = TREE_CHAIN (expr);
14782 goto recursive_label;
14783 break;
14784 case TREE_VEC:
14785 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14786 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14787 break;
14788 default:
14789 break;
14791 break;
14792 case tcc_expression:
14793 case tcc_reference:
14794 case tcc_comparison:
14795 case tcc_unary:
14796 case tcc_binary:
14797 case tcc_statement:
14798 case tcc_vl_exp:
14799 len = TREE_OPERAND_LENGTH (expr);
14800 for (i = 0; i < len; ++i)
14801 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14802 break;
14803 case tcc_declaration:
14804 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14805 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14806 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14808 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14809 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14810 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14811 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14812 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14814 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14815 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14817 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14819 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14820 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14821 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14823 break;
14824 case tcc_type:
14825 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14826 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14827 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14828 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14829 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14830 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14831 if (INTEGRAL_TYPE_P (expr)
14832 || SCALAR_FLOAT_TYPE_P (expr))
14834 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14835 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14837 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14838 if (TREE_CODE (expr) == RECORD_TYPE
14839 || TREE_CODE (expr) == UNION_TYPE
14840 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14841 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14842 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14843 break;
14844 default:
14845 break;
14849 /* Helper function for outputting the checksum of a tree T. When
14850 debugging with gdb, you can "define mynext" to be "next" followed
14851 by "call debug_fold_checksum (op0)", then just trace down till the
14852 outputs differ. */
14854 DEBUG_FUNCTION void
14855 debug_fold_checksum (const_tree t)
14857 int i;
14858 unsigned char checksum[16];
14859 struct md5_ctx ctx;
14860 hash_table <pointer_hash <tree_node> > ht;
14861 ht.create (32);
14863 md5_init_ctx (&ctx);
14864 fold_checksum_tree (t, &ctx, ht);
14865 md5_finish_ctx (&ctx, checksum);
14866 ht.empty ();
14868 for (i = 0; i < 16; i++)
14869 fprintf (stderr, "%d ", checksum[i]);
14871 fprintf (stderr, "\n");
14874 #endif
14876 /* Fold a unary tree expression with code CODE of type TYPE with an
14877 operand OP0. LOC is the location of the resulting expression.
14878 Return a folded expression if successful. Otherwise, return a tree
14879 expression with code CODE of type TYPE with an operand OP0. */
14881 tree
14882 fold_build1_stat_loc (location_t loc,
14883 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14885 tree tem;
14886 #ifdef ENABLE_FOLD_CHECKING
14887 unsigned char checksum_before[16], checksum_after[16];
14888 struct md5_ctx ctx;
14889 hash_table <pointer_hash <tree_node> > ht;
14891 ht.create (32);
14892 md5_init_ctx (&ctx);
14893 fold_checksum_tree (op0, &ctx, ht);
14894 md5_finish_ctx (&ctx, checksum_before);
14895 ht.empty ();
14896 #endif
14898 tem = fold_unary_loc (loc, code, type, op0);
14899 if (!tem)
14900 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14902 #ifdef ENABLE_FOLD_CHECKING
14903 md5_init_ctx (&ctx);
14904 fold_checksum_tree (op0, &ctx, ht);
14905 md5_finish_ctx (&ctx, checksum_after);
14906 ht.dispose ();
14908 if (memcmp (checksum_before, checksum_after, 16))
14909 fold_check_failed (op0, tem);
14910 #endif
14911 return tem;
14914 /* Fold a binary tree expression with code CODE of type TYPE with
14915 operands OP0 and OP1. LOC is the location of the resulting
14916 expression. Return a folded expression if successful. Otherwise,
14917 return a tree expression with code CODE of type TYPE with operands
14918 OP0 and OP1. */
14920 tree
14921 fold_build2_stat_loc (location_t loc,
14922 enum tree_code code, tree type, tree op0, tree op1
14923 MEM_STAT_DECL)
14925 tree tem;
14926 #ifdef ENABLE_FOLD_CHECKING
14927 unsigned char checksum_before_op0[16],
14928 checksum_before_op1[16],
14929 checksum_after_op0[16],
14930 checksum_after_op1[16];
14931 struct md5_ctx ctx;
14932 hash_table <pointer_hash <tree_node> > ht;
14934 ht.create (32);
14935 md5_init_ctx (&ctx);
14936 fold_checksum_tree (op0, &ctx, ht);
14937 md5_finish_ctx (&ctx, checksum_before_op0);
14938 ht.empty ();
14940 md5_init_ctx (&ctx);
14941 fold_checksum_tree (op1, &ctx, ht);
14942 md5_finish_ctx (&ctx, checksum_before_op1);
14943 ht.empty ();
14944 #endif
14946 tem = fold_binary_loc (loc, code, type, op0, op1);
14947 if (!tem)
14948 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14950 #ifdef ENABLE_FOLD_CHECKING
14951 md5_init_ctx (&ctx);
14952 fold_checksum_tree (op0, &ctx, ht);
14953 md5_finish_ctx (&ctx, checksum_after_op0);
14954 ht.empty ();
14956 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14957 fold_check_failed (op0, tem);
14959 md5_init_ctx (&ctx);
14960 fold_checksum_tree (op1, &ctx, ht);
14961 md5_finish_ctx (&ctx, checksum_after_op1);
14962 ht.dispose ();
14964 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14965 fold_check_failed (op1, tem);
14966 #endif
14967 return tem;
14970 /* Fold a ternary tree expression with code CODE of type TYPE with
14971 operands OP0, OP1, and OP2. Return a folded expression if
14972 successful. Otherwise, return a tree expression with code CODE of
14973 type TYPE with operands OP0, OP1, and OP2. */
14975 tree
14976 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14977 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14979 tree tem;
14980 #ifdef ENABLE_FOLD_CHECKING
14981 unsigned char checksum_before_op0[16],
14982 checksum_before_op1[16],
14983 checksum_before_op2[16],
14984 checksum_after_op0[16],
14985 checksum_after_op1[16],
14986 checksum_after_op2[16];
14987 struct md5_ctx ctx;
14988 hash_table <pointer_hash <tree_node> > ht;
14990 ht.create (32);
14991 md5_init_ctx (&ctx);
14992 fold_checksum_tree (op0, &ctx, ht);
14993 md5_finish_ctx (&ctx, checksum_before_op0);
14994 ht.empty ();
14996 md5_init_ctx (&ctx);
14997 fold_checksum_tree (op1, &ctx, ht);
14998 md5_finish_ctx (&ctx, checksum_before_op1);
14999 ht.empty ();
15001 md5_init_ctx (&ctx);
15002 fold_checksum_tree (op2, &ctx, ht);
15003 md5_finish_ctx (&ctx, checksum_before_op2);
15004 ht.empty ();
15005 #endif
15007 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15008 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15009 if (!tem)
15010 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15012 #ifdef ENABLE_FOLD_CHECKING
15013 md5_init_ctx (&ctx);
15014 fold_checksum_tree (op0, &ctx, ht);
15015 md5_finish_ctx (&ctx, checksum_after_op0);
15016 ht.empty ();
15018 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15019 fold_check_failed (op0, tem);
15021 md5_init_ctx (&ctx);
15022 fold_checksum_tree (op1, &ctx, ht);
15023 md5_finish_ctx (&ctx, checksum_after_op1);
15024 ht.empty ();
15026 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15027 fold_check_failed (op1, tem);
15029 md5_init_ctx (&ctx);
15030 fold_checksum_tree (op2, &ctx, ht);
15031 md5_finish_ctx (&ctx, checksum_after_op2);
15032 ht.dispose ();
15034 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15035 fold_check_failed (op2, tem);
15036 #endif
15037 return tem;
15040 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15041 arguments in ARGARRAY, and a null static chain.
15042 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15043 of type TYPE from the given operands as constructed by build_call_array. */
15045 tree
15046 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15047 int nargs, tree *argarray)
15049 tree tem;
15050 #ifdef ENABLE_FOLD_CHECKING
15051 unsigned char checksum_before_fn[16],
15052 checksum_before_arglist[16],
15053 checksum_after_fn[16],
15054 checksum_after_arglist[16];
15055 struct md5_ctx ctx;
15056 hash_table <pointer_hash <tree_node> > ht;
15057 int i;
15059 ht.create (32);
15060 md5_init_ctx (&ctx);
15061 fold_checksum_tree (fn, &ctx, ht);
15062 md5_finish_ctx (&ctx, checksum_before_fn);
15063 ht.empty ();
15065 md5_init_ctx (&ctx);
15066 for (i = 0; i < nargs; i++)
15067 fold_checksum_tree (argarray[i], &ctx, ht);
15068 md5_finish_ctx (&ctx, checksum_before_arglist);
15069 ht.empty ();
15070 #endif
15072 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15074 #ifdef ENABLE_FOLD_CHECKING
15075 md5_init_ctx (&ctx);
15076 fold_checksum_tree (fn, &ctx, ht);
15077 md5_finish_ctx (&ctx, checksum_after_fn);
15078 ht.empty ();
15080 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15081 fold_check_failed (fn, tem);
15083 md5_init_ctx (&ctx);
15084 for (i = 0; i < nargs; i++)
15085 fold_checksum_tree (argarray[i], &ctx, ht);
15086 md5_finish_ctx (&ctx, checksum_after_arglist);
15087 ht.dispose ();
15089 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15090 fold_check_failed (NULL_TREE, tem);
15091 #endif
15092 return tem;
15095 /* Perform constant folding and related simplification of initializer
15096 expression EXPR. These behave identically to "fold_buildN" but ignore
15097 potential run-time traps and exceptions that fold must preserve. */
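/* E.g. a static initializer like "double d = 1.0 / 0.0;" may be
   folded to +Inf here, since flag_trapping_math is temporarily
   cleared while folding it.  */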
15099 #define START_FOLD_INIT \
15100 int saved_signaling_nans = flag_signaling_nans;\
15101 int saved_trapping_math = flag_trapping_math;\
15102 int saved_rounding_math = flag_rounding_math;\
15103 int saved_trapv = flag_trapv;\
15104 int saved_folding_initializer = folding_initializer;\
15105 flag_signaling_nans = 0;\
15106 flag_trapping_math = 0;\
15107 flag_rounding_math = 0;\
15108 flag_trapv = 0;\
15109 folding_initializer = 1;
15111 #define END_FOLD_INIT \
15112 flag_signaling_nans = saved_signaling_nans;\
15113 flag_trapping_math = saved_trapping_math;\
15114 flag_rounding_math = saved_rounding_math;\
15115 flag_trapv = saved_trapv;\
15116 folding_initializer = saved_folding_initializer;
15118 tree
15119 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15120 tree type, tree op)
15122 tree result;
15123 START_FOLD_INIT;
15125 result = fold_build1_loc (loc, code, type, op);
15127 END_FOLD_INIT;
15128 return result;
15131 tree
15132 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15133 tree type, tree op0, tree op1)
15135 tree result;
15136 START_FOLD_INIT;
15138 result = fold_build2_loc (loc, code, type, op0, op1);
15140 END_FOLD_INIT;
15141 return result;
15144 tree
15145 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15146 tree type, tree op0, tree op1, tree op2)
15148 tree result;
15149 START_FOLD_INIT;
15151 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15153 END_FOLD_INIT;
15154 return result;
15157 tree
15158 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15159 int nargs, tree *argarray)
15161 tree result;
15162 START_FOLD_INIT;
15164 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15166 END_FOLD_INIT;
15167 return result;
15170 #undef START_FOLD_INIT
15171 #undef END_FOLD_INIT
15173 /* Determine if the first argument is a multiple of the second argument.
15174 Return 0 if it is not, or if we cannot easily determine it to be.
15176 An example of the sort of thing we care about (at this point; this routine
15177 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15178 fold cases do now) is discovering that
15180 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15182 is a multiple of
15184 SAVE_EXPR (J * 8)
15186 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15188 This code also handles discovering that
15190 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15192 is a multiple of 8 so we don't have to worry about dealing with a
15193 possible remainder.
15195 Note that we *look* inside a SAVE_EXPR only to determine how it was
15196 calculated; it is not safe for fold to do much of anything else with the
15197 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15198 at run time. For example, the latter example above *cannot* be implemented
15199 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15200 evaluation time of the original SAVE_EXPR is not necessarily the same at
15201 the time the new expression is evaluated. The only optimization of this
15202 sort that would be valid is changing
15204 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15206 divided by 8 to
15208 SAVE_EXPR (I) * SAVE_EXPR (J)
15210 (where the same SAVE_EXPR (J) is used in the original and the
15211 transformed version). */
15213 int
15214 multiple_of_p (tree type, const_tree top, const_tree bottom)
15216 if (operand_equal_p (top, bottom, 0))
15217 return 1;
15219 if (TREE_CODE (type) != INTEGER_TYPE)
15220 return 0;
15222 switch (TREE_CODE (top))
15224 case BIT_AND_EXPR:
15225 /* Bitwise AND preserves power-of-two multiples: if BOTTOM is a power
15226 of two and either operand is a multiple of it, so is TOP. */
15227 if (!integer_pow2p (bottom))
15228 return 0;
15229 /* FALLTHRU */
15231 case MULT_EXPR:
15232 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15233 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15235 case PLUS_EXPR:
15236 case MINUS_EXPR:
15237 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15238 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15240 case LSHIFT_EXPR:
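    /* E.g. (X << 3) is a multiple of 8: fold 1 << 3 to 8 and check
       whether 8 is a multiple of BOTTOM.  */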
15241 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15243 tree op1, t1;
15245 op1 = TREE_OPERAND (top, 1);
15246 /* const_binop may not detect overflow correctly,
15247 so check for it explicitly here. */
15248 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15249 > TREE_INT_CST_LOW (op1)
15250 && TREE_INT_CST_HIGH (op1) == 0
15251 && 0 != (t1 = fold_convert (type,
15252 const_binop (LSHIFT_EXPR,
15253 size_one_node,
15254 op1)))
15255 && !TREE_OVERFLOW (t1))
15256 return multiple_of_p (type, t1, bottom);
15258 return 0;
15260 case NOP_EXPR:
15261 /* Can't handle conversions from a non-integral or wider integral type. */
15262 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15263 || (TYPE_PRECISION (type)
15264 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15265 return 0;
15267 /* ... fall through ... */
15269 case SAVE_EXPR:
15270 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15272 case COND_EXPR:
15273 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15274 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15276 case INTEGER_CST:
15277 if (TREE_CODE (bottom) != INTEGER_CST
15278 || integer_zerop (bottom)
15279 || (TYPE_UNSIGNED (type)
15280 && (tree_int_cst_sgn (top) < 0
15281 || tree_int_cst_sgn (bottom) < 0)))
15282 return 0;
15283 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15284 top, bottom));
15286 default:
15287 return 0;
15291 /* Return true if CODE or TYPE is known to be non-negative. */
15293 static bool
15294 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15296 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15297 && truth_value_p (code))
15298 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15299 have a signed:1 type (where the values are -1 and 0). */
15300 return true;
15301 return false;
15304 /* Return true if (CODE OP0) is known to be non-negative. If the return
15305 value is based on the assumption that signed overflow is undefined,
15306 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15307 *STRICT_OVERFLOW_P. */
15309 bool
15310 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15311 bool *strict_overflow_p)
15313 if (TYPE_UNSIGNED (type))
15314 return true;
15316 switch (code)
15318 case ABS_EXPR:
15319 /* We can't return 1 if flag_wrapv is set because
15320 ABS_EXPR<INT_MIN> = INT_MIN. */
15321 if (!INTEGRAL_TYPE_P (type))
15322 return true;
15323 if (TYPE_OVERFLOW_UNDEFINED (type))
15325 *strict_overflow_p = true;
15326 return true;
15328 break;
15330 case NON_LVALUE_EXPR:
15331 case FLOAT_EXPR:
15332 case FIX_TRUNC_EXPR:
15333 return tree_expr_nonnegative_warnv_p (op0,
15334 strict_overflow_p);
15336 case NOP_EXPR:
15338 tree inner_type = TREE_TYPE (op0);
15339 tree outer_type = type;
15341 if (TREE_CODE (outer_type) == REAL_TYPE)
15343 if (TREE_CODE (inner_type) == REAL_TYPE)
15344 return tree_expr_nonnegative_warnv_p (op0,
15345 strict_overflow_p);
15346 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15348 if (TYPE_UNSIGNED (inner_type))
15349 return true;
15350 return tree_expr_nonnegative_warnv_p (op0,
15351 strict_overflow_p);
15354 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15356 if (TREE_CODE (inner_type) == REAL_TYPE)
15357 return tree_expr_nonnegative_warnv_p (op0,
15358 strict_overflow_p);
15359 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15360 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15361 && TYPE_UNSIGNED (inner_type);
15364 break;
15366 default:
15367 return tree_simple_nonnegative_warnv_p (code, type);
15370 /* We don't know sign of `t', so be conservative and return false. */
15371 return false;
15374 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15375 value is based on the assumption that signed overflow is undefined,
15376 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15377 *STRICT_OVERFLOW_P. */
15379 bool
15380 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15381 tree op1, bool *strict_overflow_p)
15383 if (TYPE_UNSIGNED (type))
15384 return true;
15386 switch (code)
15388 case POINTER_PLUS_EXPR:
15389 case PLUS_EXPR:
15390 if (FLOAT_TYPE_P (type))
15391 return (tree_expr_nonnegative_warnv_p (op0,
15392 strict_overflow_p)
15393 && tree_expr_nonnegative_warnv_p (op1,
15394 strict_overflow_p));
15396 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15397 both unsigned and at least 2 bits shorter than the result. */
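	 /* E.g. (int) ((unsigned char) a + (unsigned char) b): the sum
	    needs at most 9 bits, well below int's 31 value bits, so it
	    can never wrap negative.  */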
15398 if (TREE_CODE (type) == INTEGER_TYPE
15399 && TREE_CODE (op0) == NOP_EXPR
15400 && TREE_CODE (op1) == NOP_EXPR)
15402 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15403 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15404 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15405 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15407 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15408 TYPE_PRECISION (inner2)) + 1;
15409 return prec < TYPE_PRECISION (type);
15412 break;
15414 case MULT_EXPR:
15415 if (FLOAT_TYPE_P (type))
15417 /* x * x for floating point x is always non-negative. */
15418 if (operand_equal_p (op0, op1, 0))
15419 return true;
15420 return (tree_expr_nonnegative_warnv_p (op0,
15421 strict_overflow_p)
15422 && tree_expr_nonnegative_warnv_p (op1,
15423 strict_overflow_p));
15426 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
15427 unsigned and the sum of their precisions is less than the result's. */
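	 /* E.g. (int) ((unsigned char) a * (unsigned char) b) needs at
	    most 16 bits, so the product cannot exceed INT_MAX.  */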
15428 if (TREE_CODE (type) == INTEGER_TYPE
15429 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15430 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15432 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15433 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15434 : TREE_TYPE (op0);
15435 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15436 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15437 : TREE_TYPE (op1);
15439 bool unsigned0 = TYPE_UNSIGNED (inner0);
15440 bool unsigned1 = TYPE_UNSIGNED (inner1);
15442 if (TREE_CODE (op0) == INTEGER_CST)
15443 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15445 if (TREE_CODE (op1) == INTEGER_CST)
15446 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15448 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15449 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15451 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15452 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15453 : TYPE_PRECISION (inner0);
15455 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15456 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15457 : TYPE_PRECISION (inner1);
15459 return precision0 + precision1 < TYPE_PRECISION (type);
15462 return false;
15464 case BIT_AND_EXPR:
15465 case MAX_EXPR:
15466 return (tree_expr_nonnegative_warnv_p (op0,
15467 strict_overflow_p)
15468 || tree_expr_nonnegative_warnv_p (op1,
15469 strict_overflow_p));
15471 case BIT_IOR_EXPR:
15472 case BIT_XOR_EXPR:
15473 case MIN_EXPR:
15474 case RDIV_EXPR:
15475 case TRUNC_DIV_EXPR:
15476 case CEIL_DIV_EXPR:
15477 case FLOOR_DIV_EXPR:
15478 case ROUND_DIV_EXPR:
15479 return (tree_expr_nonnegative_warnv_p (op0,
15480 strict_overflow_p)
15481 && tree_expr_nonnegative_warnv_p (op1,
15482 strict_overflow_p));
15484 case TRUNC_MOD_EXPR:
15485 case CEIL_MOD_EXPR:
15486 case FLOOR_MOD_EXPR:
15487 case ROUND_MOD_EXPR:
15488 return tree_expr_nonnegative_warnv_p (op0,
15489 strict_overflow_p);
15490 default:
15491 return tree_simple_nonnegative_warnv_p (code, type);
15494 /* We don't know sign of `t', so be conservative and return false. */
15495 return false;
15498 /* Return true if T is known to be non-negative. If the return
15499 value is based on the assumption that signed overflow is undefined,
15500 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15501 *STRICT_OVERFLOW_P. */
15503 bool
15504 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15506 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15507 return true;
15509 switch (TREE_CODE (t))
15511 case INTEGER_CST:
15512 return tree_int_cst_sgn (t) >= 0;
15514 case REAL_CST:
15515 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15517 case FIXED_CST:
15518 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15520 case COND_EXPR:
15521 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15522 strict_overflow_p)
15523 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15524 strict_overflow_p));
15525 default:
15526 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15527 TREE_TYPE (t));
15529 /* We don't know sign of `t', so be conservative and return false. */
15530 return false;
15533 /* Return true if T is known to be non-negative. If the return
15534 value is based on the assumption that signed overflow is undefined,
15535 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15536 *STRICT_OVERFLOW_P. */
15538 bool
15539 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15540 tree arg0, tree arg1, bool *strict_overflow_p)
15542 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15543 switch (DECL_FUNCTION_CODE (fndecl))
15545 CASE_FLT_FN (BUILT_IN_ACOS):
15546 CASE_FLT_FN (BUILT_IN_ACOSH):
15547 CASE_FLT_FN (BUILT_IN_CABS):
15548 CASE_FLT_FN (BUILT_IN_COSH):
15549 CASE_FLT_FN (BUILT_IN_ERFC):
15550 CASE_FLT_FN (BUILT_IN_EXP):
15551 CASE_FLT_FN (BUILT_IN_EXP10):
15552 CASE_FLT_FN (BUILT_IN_EXP2):
15553 CASE_FLT_FN (BUILT_IN_FABS):
15554 CASE_FLT_FN (BUILT_IN_FDIM):
15555 CASE_FLT_FN (BUILT_IN_HYPOT):
15556 CASE_FLT_FN (BUILT_IN_POW10):
15557 CASE_INT_FN (BUILT_IN_FFS):
15558 CASE_INT_FN (BUILT_IN_PARITY):
15559 CASE_INT_FN (BUILT_IN_POPCOUNT):
15560 case BUILT_IN_BSWAP32:
15561 case BUILT_IN_BSWAP64:
15562 /* Always true. */
15563 return true;
15565 CASE_FLT_FN (BUILT_IN_SQRT):
15566 /* sqrt(-0.0) is -0.0. */
15567 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15568 return true;
15569 return tree_expr_nonnegative_warnv_p (arg0,
15570 strict_overflow_p);
15572 CASE_FLT_FN (BUILT_IN_ASINH):
15573 CASE_FLT_FN (BUILT_IN_ATAN):
15574 CASE_FLT_FN (BUILT_IN_ATANH):
15575 CASE_FLT_FN (BUILT_IN_CBRT):
15576 CASE_FLT_FN (BUILT_IN_CEIL):
15577 CASE_FLT_FN (BUILT_IN_ERF):
15578 CASE_FLT_FN (BUILT_IN_EXPM1):
15579 CASE_FLT_FN (BUILT_IN_FLOOR):
15580 CASE_FLT_FN (BUILT_IN_FMOD):
15581 CASE_FLT_FN (BUILT_IN_FREXP):
15582 CASE_FLT_FN (BUILT_IN_ICEIL):
15583 CASE_FLT_FN (BUILT_IN_IFLOOR):
15584 CASE_FLT_FN (BUILT_IN_IRINT):
15585 CASE_FLT_FN (BUILT_IN_IROUND):
15586 CASE_FLT_FN (BUILT_IN_LCEIL):
15587 CASE_FLT_FN (BUILT_IN_LDEXP):
15588 CASE_FLT_FN (BUILT_IN_LFLOOR):
15589 CASE_FLT_FN (BUILT_IN_LLCEIL):
15590 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15591 CASE_FLT_FN (BUILT_IN_LLRINT):
15592 CASE_FLT_FN (BUILT_IN_LLROUND):
15593 CASE_FLT_FN (BUILT_IN_LRINT):
15594 CASE_FLT_FN (BUILT_IN_LROUND):
15595 CASE_FLT_FN (BUILT_IN_MODF):
15596 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15597 CASE_FLT_FN (BUILT_IN_RINT):
15598 CASE_FLT_FN (BUILT_IN_ROUND):
15599 CASE_FLT_FN (BUILT_IN_SCALB):
15600 CASE_FLT_FN (BUILT_IN_SCALBLN):
15601 CASE_FLT_FN (BUILT_IN_SCALBN):
15602 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15603 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15604 CASE_FLT_FN (BUILT_IN_SINH):
15605 CASE_FLT_FN (BUILT_IN_TANH):
15606 CASE_FLT_FN (BUILT_IN_TRUNC):
15607 /* True if the 1st argument is nonnegative. */
15608 return tree_expr_nonnegative_warnv_p (arg0,
15609 strict_overflow_p);
15611 CASE_FLT_FN (BUILT_IN_FMAX):
15612 /* True if the 1st OR 2nd arguments are nonnegative. */
15613 return (tree_expr_nonnegative_warnv_p (arg0,
15614 strict_overflow_p)
15615 || (tree_expr_nonnegative_warnv_p (arg1,
15616 strict_overflow_p)));
15618 CASE_FLT_FN (BUILT_IN_FMIN):
15619 /* True if the 1st AND 2nd arguments are nonnegative. */
15620 return (tree_expr_nonnegative_warnv_p (arg0,
15621 strict_overflow_p)
15622 && (tree_expr_nonnegative_warnv_p (arg1,
15623 strict_overflow_p)));
15625 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15626 /* True if the 2nd argument is nonnegative. */
15627 return tree_expr_nonnegative_warnv_p (arg1,
15628 strict_overflow_p);
15630 CASE_FLT_FN (BUILT_IN_POWI):
15631 /* True if the 1st argument is nonnegative or the second
15632 argument is an even integer. */
15633 if (TREE_CODE (arg1) == INTEGER_CST
15634 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15635 return true;
15636 return tree_expr_nonnegative_warnv_p (arg0,
15637 strict_overflow_p);
15639 CASE_FLT_FN (BUILT_IN_POW):
15640 /* True if the 1st argument is nonnegative or the second
15641 argument is an even integer valued real. */
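	 /* E.g. pow (x, 2.0) is nonnegative for any x (NaNs aside).  */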
15642 if (TREE_CODE (arg1) == REAL_CST)
15644 REAL_VALUE_TYPE c;
15645 HOST_WIDE_INT n;
15647 c = TREE_REAL_CST (arg1);
15648 n = real_to_integer (&c);
15649 if ((n & 1) == 0)
15651 REAL_VALUE_TYPE cint;
15652 real_from_integer (&cint, VOIDmode, n,
15653 n < 0 ? -1 : 0, 0);
15654 if (real_identical (&c, &cint))
15655 return true;
15658 return tree_expr_nonnegative_warnv_p (arg0,
15659 strict_overflow_p);
15661 default:
15662 break;
15664 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15665 type);
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}
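
/* An illustration of the TARGET_EXPR case above (hypothetical names).
   For a temporary slot whose initializer is a statement list ending in
   "slot = x * x", the loop walks BIND_EXPRs, TRY blocks and
   STATEMENT_LISTs down to that final MODIFY_EXPR, so the query reduces
   to tree_expr_nonnegative_warnv_p on "x * x" -- which, for signed x,
   succeeds only under the undefined-signed-overflow assumption and
   therefore sets *STRICT_OVERFLOW_P.  */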
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
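
/* A minimal usage sketch (illustrative; `x' is a hypothetical signed
   operand):

     tree square = fold_build2 (MULT_EXPR, TREE_TYPE (x), x, x);
     if (tree_expr_nonnegative_p (square))
       ...

   x * x is nonnegative only if signed overflow is undefined, so the
   call above may emit the -Wstrict-overflow note via
   fold_overflow_warning before returning true.  */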
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
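
/* A worked instance of the MULT_EXPR case above (OP_A and OP_B are
   hypothetical operands, both known-nonzero signed ints).  The product
   is reported nonzero only by assuming signed overflow is undefined:
   with wrapping arithmetic, 0x10000 * 0x10000 is 0 in 32 bits.  Hence

     bool strict = false;
     bool nz = tree_binary_nonzero_warnv_p (MULT_EXPR, integer_type_node,
                                            op_a, op_b, &strict);

   yields nz == true with strict == true for an overflow-undefined
   type.  */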
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
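
/* An illustration of the ADDR_EXPR case above.  For "extern int x;"
   the address &x is known nonzero (given
   -fdelete-null-pointer-checks), while for a weak symbol,
   "extern int w __attribute__ ((weak));", DECL_WEAK is set on the base
   and the function returns false, since an undefined weak symbol
   resolves to a null address.  */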
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
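
/* A usage sketch (illustrative; `cond' is a hypothetical tree).  This
   wrapper is the warning-emitting entry point:

     if (tree_expr_nonzero_p (cond))
       return boolean_true_node;

   i.e. a NE_EXPR of COND against zero can be folded to true.  The
   -Wstrict-overflow note, if any, is issued here rather than in the
   _warnv_p workers, which merely record the assumption.  */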
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
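
/* A usage sketch (illustrative only):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          two, three);

   FIVE is the INTEGER_CST 5; for operands that do not fold to a
   constant (say, two VAR_DECLs), the result is NULL_TREE.  */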
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special case of a zero lower bound.

             We convert the lower bound to sizetype to avoid problems
             with constant folding.  E.g. suppose the lower bound is 1
             and its mode is QI.  Without the conversion,
             (ARRAY + (INDEX - (unsigned char) 1)) becomes
             ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
             (ARRAY + 255 + INDEX).  Oops!  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
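
/* A worked example (illustrative).  For the C expression "abc"[1], EXP
   is an ARRAY_REF of a STRING_CST with index 1; the checks above
   (character-sized integer mode, index below TREE_STRING_LENGTH) pass,
   and the result is build_int_cst_type (TREE_TYPE (exp), 'b').  */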
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
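
/* A worked instance of the INTEGER_CST case (illustrative).  Negating
   the most negative value of a signed type, e.g.

     fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                        integer_type_node);

   wraps back to the same value in two's complement; neg_with_overflow
   reports this and force_fit_type_double marks the result with
   TREE_OVERFLOW, since the type is signed.  */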
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
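
/* A worked instance of the canonicalization above (illustrative).
   Folding "2 >= 3" arrives with GE_EXPR: no swap is needed, GE becomes
   LT with INVERT set, INT_CST_LT (2, 3) yields 1, and inverting gives
   0, i.e. a false boolean node.  Folding "2 <= 3" first swaps the
   operands (LE becomes GE), then inverts (GE becomes LT), so it
   computes !INT_CST_LT (3, 2) == 1, a true boolean node.  */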
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If not, we don't need to wrap the expression
     in a cleanup point expression.  Note we don't check the left-hand side
     of the modify expression because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
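
/* Illustrations of the rewrites above, in C terms (hypothetical
   declarations).  With "int a[4];", *(int *)&a folds to a[0]; with
   "_Complex double c;", *(double *)&c folds to __real__ c, and
   ((double *)&c)[1] folds to __imag__ c via the POINTER_PLUS_EXPR path,
   because the byte offset equals TYPE_SIZE_UNIT (double).  */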
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If the divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
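
/* A worked instance of the power-of-two path above (illustrative;
   `loc' is a hypothetical location).  For VALUE 13 and DIVISOR 8, the
   constant case clears the low bits and adds the divisor:
   (13 & ~7) + 8 == 16.  The non-constant case computes the same thing
   as (VALUE + 7) & -8.  So

     round_up_loc (loc, size_int (13), 8)

   folds to the sizetype constant 16.  */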
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If the divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Return a pointer to the base of the object addressed by EXP and
   extract the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
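
/* A worked example (illustrative).  With "int a[10];" and 4-byte int,
   ptr_difference_const (&a[5], &a[2], &diff) finds the common core &a
   with constant bit positions 160 and 64, and stores
   (160 - 64) / 8 == 12 in *diff.  For addresses with different bases,
   e.g. &a[5] and &b[2], operand_equal_p fails on the cores and the
   result is false.  */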
/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call and return the first
               argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
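
/* A usage sketch (illustrative; `arg' is hypothetical).  When only the
   magnitude of the result matters, e.g. while folding fabs (-x * y),
   the caller strips the sign-flipping operations:

     tree stripped = fold_strip_sign_ops (arg);
     if (stripped)
       arg = stripped;

   so "-x * y" becomes "x * y".  A copysign (a, b) call likewise
   collapses to "a", with b's side effects preserved through
   omit_one_operand_loc.  */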