/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
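
/* For example, the codes compose bitwise: COMPCODE_LE == (COMPCODE_LT
   | COMPCODE_EQ), COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ
   | COMPCODE_GT), and bit 3 is the "unordered" bit, so COMPCODE_UNLT
   == (COMPCODE_UNORD | COMPCODE_LT).  Inverting a comparison is just
   complementing against COMPCODE_TRUE, e.g. COMPCODE_NE
   == (COMPCODE_TRUE & ~COMPCODE_EQ).  */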
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
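
/* For example, with CODE == TRUNC_DIV_EXPR, dividing the constant 12 by 4
   yields the constant 3, whereas dividing 13 by 4 yields NULL_TREE because
   the remainder is nonzero.  */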
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
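
/* A minimal usage sketch of the deferral machinery (hypothetical caller,
   names assumed): code that folds speculatively brackets the work with
   the defer/undefer pair, e.g.

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt, 0);

   so that a -Wstrict-overflow warning is only emitted when the folded
   result is actually kept.  */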
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
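
/* For a 32-bit signed type on a 64-bit HOST_WIDE_INT host, the check above
   reduces to VAL != 0x80000000: every value except INT_MIN can be negated
   without overflow, since -INT_MIN is not representable.  */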
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
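
/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR stores the
   literal 5 in *MINUS_LITP (it was subtracted), leaves *CONP and *LITP
   null, and returns x as the variable part.  */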
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
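
/* For example, re-associating T1 = x + 1 with T2 = -y under PLUS_EXPR
   builds (x + 1) - y with build2_loc directly: since T1 already has code
   PLUS_EXPR, calling fold_build2_loc here could recurse forever, so only
   the NEGATE_EXPR and zero special cases are folded.  */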
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
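
/* For example, int_const_binop (PLUS_EXPR, 2, 3) yields the constant 5,
   while adding 1 to the maximum signed value yields the wrapped constant
   with TREE_OVERFLOW set, since OVERFLOWABLE is 1 and the signed addition
   overflowed.  */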
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
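
/* As a worked example of the straightforward (flag_complex_method == 0)
   algorithm above: (1 + 2i) / (3 + 4i) gives magsquared = 3*3 + 4*4 = 25,
   t1 = 1*3 + 2*4 = 11 and t2 = 2*3 - 1*4 = 2, so the result is
   (11/25) + (2/25)i, matching (1 + 2i)(3 - 4i) / 25.  */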
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
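
/* For example, with sizetype constants ARG0 = 4 and ARG1 = 8, the
   constants are unequal and 8 > 4, so the final arm computes
   0 - (ssizetype) (8 - 4), i.e. the ssizetype constant -4, avoiding an
   unsigned subtraction that would wrap.  */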
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
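
/* For example, truncating the REAL_CST 3.7 to a 32-bit int yields 3, while
   converting 1e30 saturates to the type's maximum and NaN maps to 0, both
   with TREE_OVERFLOW set on the result.  */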
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP when the discarded fractional bits
     are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
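
/* For example, the signed fixed-point value -2.5 right-shifted by FBIT
   gives -3 (an arithmetic shift floors); the discarded fraction bits are
   nonzero and TEMP_TRUNC is negative, so 1 is added to produce -2,
   rounding toward zero as the conversion requires.  */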
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
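/* A minimal usage sketch (illustrative only, not called anywhere in
   this file): a typical middle-end caller widening EXPR to "long".
   An INTEGER_CST operand is folded on the spot by the INTEGER_TYPE
   case above; anything else comes back as a simplified NOP_EXPR.  */

static tree ATTRIBUTE_UNUSED
example_widen_to_long (location_t loc, tree expr)
{
  return fold_convert_loc (loc, long_integer_type_node, expr);
}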
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
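/* A minimal sketch (illustrative only): the inverse of X < Y is
   X >= Y when NaNs need not be honored, but the NaN-safe X unge Y
   when they must be.  With NaNs honored and -ftrapping-math in
   effect the result is ERROR_MARK, since UNGE_EXPR would not trap on
   unordered operands while LT_EXPR does.  */

static enum tree_code ATTRIBUTE_UNUSED
example_invert_lt (bool honor_nans)
{
  return invert_tree_comparison (LT_EXPR, honor_nans);
}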
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
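/* A minimal sketch (illustrative only): in the compcode encoding,
   COMPCODE_LT | COMPCODE_EQ equals COMPCODE_LE, so for integral X
   and Y the disjunction (X < Y) || (X == Y) folds to the single
   comparison X <= Y.  */

static tree ATTRIBUTE_UNUSED
example_combine_lt_eq (location_t loc, tree x, tree y)
{
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              boolean_type_node, x, y);
}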
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address space equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between positive and negative zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, they both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal. */
2546 if (TREE_SIDE_EFFECTS (arg0)
2547 || TREE_SIDE_EFFECTS (arg1))
2548 return 0;
2550 switch (TREE_CODE (arg0))
2552 case INDIRECT_REF:
2553 case REALPART_EXPR:
2554 case IMAGPART_EXPR:
2555 return OP_SAME (0);
2557 case TARGET_MEM_REF:
2558 /* Require equal extra operands and then fall through to MEM_REF
2559 handling of the two common operands. */
2560 if (!OP_SAME_WITH_NULL (2)
2561 || !OP_SAME_WITH_NULL (3)
2562 || !OP_SAME_WITH_NULL (4))
2563 return 0;
2564 /* Fallthru. */
2565 case MEM_REF:
2566 /* Require equal access sizes, and similar pointer types.
2567 We can have incomplete types for array references of
2568 variable-sized arrays from the Fortran frontend
2569 though. */
2570 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2571 || (TYPE_SIZE (TREE_TYPE (arg0))
2572 && TYPE_SIZE (TREE_TYPE (arg1))
2573 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2574 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2575 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2576 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2577 && OP_SAME (0) && OP_SAME (1));
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 /* Operands 2 and 3 may be null.
2582 Compare the array index by value if it is constant first as we
2583 may have different types but same value here. */
2584 return (OP_SAME (0)
2585 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2586 TREE_OPERAND (arg1, 1))
2587 || OP_SAME (1))
2588 && OP_SAME_WITH_NULL (2)
2589 && OP_SAME_WITH_NULL (3));
2591 case COMPONENT_REF:
2592 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2593 may be NULL when we're called to compare MEM_EXPRs. */
2594 return OP_SAME_WITH_NULL (0)
2595 && OP_SAME (1)
2596 && OP_SAME_WITH_NULL (2);
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2601 default:
2602 return 0;
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2616 case FMA_EXPR:
2617 case WIDEN_MULT_PLUS_EXPR:
2618 case WIDEN_MULT_MINUS_EXPR:
2619 if (!OP_SAME (2))
2620 return 0;
2621 /* The multiplication operands are commutative. */
2622 /* FALLTHRU */
2624 case TRUTH_AND_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_XOR_EXPR:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* Otherwise take into account this is a commutative operation. */
2631 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case COND_EXPR:
2637 case VEC_COND_EXPR:
2638 case DOT_PROD_EXPR:
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 default:
2642 return 0;
2645 case tcc_vl_exp:
2646 switch (TREE_CODE (arg0))
2648 case CALL_EXPR:
2649 /* If the CALL_EXPRs call different functions, then they
2650 clearly cannot be equal. */
2651 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2652 flags))
2653 return 0;
2656 unsigned int cef = call_expr_flags (arg0);
2657 if (flags & OEP_PURE_SAME)
2658 cef &= ECF_CONST | ECF_PURE;
2659 else
2660 cef &= ECF_CONST;
2661 if (!cef)
2662 return 0;
2665 /* Now see if all the arguments are the same. */
2667 const_call_expr_arg_iterator iter0, iter1;
2668 const_tree a0, a1;
2669 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2670 a1 = first_const_call_expr_arg (arg1, &iter1);
2671 a0 && a1;
2672 a0 = next_const_call_expr_arg (&iter0),
2673 a1 = next_const_call_expr_arg (&iter1))
2674 if (! operand_equal_p (a0, a1, flags))
2675 return 0;
2677 /* If we get here and both argument lists are exhausted
2678 then the CALL_EXPRs are equal. */
2679 return ! (a0 || a1);
2681 default:
2682 return 0;
2685 case tcc_declaration:
2686 /* Consider __builtin_sqrt equal to sqrt. */
2687 return (TREE_CODE (arg0) == FUNCTION_DECL
2688 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2689 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2690 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2692 default:
2693 return 0;
2696 #undef OP_SAME
2697 #undef OP_SAME_WITH_NULL
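/* A minimal usage sketch (illustrative only): deciding whether two
   operands compute the same value. With FLAGS of zero a DECL
   compares equal to itself; passing OEP_ONLY_CONST instead accepts
   only indistinguishable constants.  */

static bool ATTRIBUTE_UNUSED
example_operands_identical (const_tree a, const_tree b)
{
  return operand_equal_p (a, b, 0) != 0;
}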
2700 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2701 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2703 When in doubt, return 0. */
2705 static int
2706 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2708 int unsignedp1, unsignedpo;
2709 tree primarg0, primarg1, primother;
2710 unsigned int correct_width;
2712 if (operand_equal_p (arg0, arg1, 0))
2713 return 1;
2715 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2716 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2717 return 0;
2719 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2720 and see if the inner values are the same. This removes any
2721 signedness comparison, which doesn't matter here. */
2722 primarg0 = arg0, primarg1 = arg1;
2723 STRIP_NOPS (primarg0);
2724 STRIP_NOPS (primarg1);
2725 if (operand_equal_p (primarg0, primarg1, 0))
2726 return 1;
2728 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2729 actual comparison operand, ARG0.
2731 First throw away any conversions to wider types
2732 already present in the operands. */
2734 primarg1 = get_narrower (arg1, &unsignedp1);
2735 primother = get_narrower (other, &unsignedpo);
2737 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2738 if (unsignedp1 == unsignedpo
2739 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2740 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2742 tree type = TREE_TYPE (arg0);
2744 /* Make sure shorter operand is extended the right way
2745 to match the longer operand. */
2746 primarg1 = fold_convert (signed_or_unsigned_type_for
2747 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2749 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2750 return 1;
2753 return 0;
2756 /* See if ARG is an expression that is either a comparison or is performing
2757 arithmetic on comparisons. The comparisons must only be comparing
2758 two different values, which will be stored in *CVAL1 and *CVAL2; if
2759 they are nonzero it means that some operands have already been found.
2760 No variables may be used anywhere else in the expression except in the
2761 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2762 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2764 If this is true, return 1. Otherwise, return zero. */
2766 static int
2767 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2774 tclass = tcc_unary;
2775 else if (tclass == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2777 || code == COMPOUND_EXPR))
2778 tclass = tcc_binary;
2780 else if (tclass == tcc_expression && code == SAVE_EXPR
2781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2783 /* If we've already found a CVAL1 or CVAL2, this expression is
2784 too complex to handle. */
2785 if (*cval1 || *cval2)
2786 return 0;
2788 tclass = tcc_unary;
2789 *save_p = 1;
2792 switch (tclass)
2794 case tcc_unary:
2795 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2797 case tcc_binary:
2798 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2799 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2800 cval1, cval2, save_p));
2802 case tcc_constant:
2803 return 1;
2805 case tcc_expression:
2806 if (code == COND_EXPR)
2807 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2808 cval1, cval2, save_p)
2809 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2812 cval1, cval2, save_p));
2813 return 0;
2815 case tcc_comparison:
2816 /* First see if we can handle the first operand, then the second. For
2817 the second operand, we know *CVAL1 can't be zero. It must be that
2818 one side of the comparison is each of the values; test for the
2819 case where this isn't true by failing if the two operands
2820 are the same. */
2822 if (operand_equal_p (TREE_OPERAND (arg, 0),
2823 TREE_OPERAND (arg, 1), 0))
2824 return 0;
2826 if (*cval1 == 0)
2827 *cval1 = TREE_OPERAND (arg, 0);
2828 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2829 ;
2830 else if (*cval2 == 0)
2831 *cval2 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2833 ;
2834 else
2835 return 0;
2837 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2838 ;
2839 else if (*cval2 == 0)
2840 *cval2 = TREE_OPERAND (arg, 1);
2841 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2842 ;
2843 else
2844 return 0;
2846 return 1;
2848 default:
2849 return 0;
2853 /* ARG is a tree that is known to contain just arithmetic operations and
2854 comparisons. Evaluate the operations in the tree substituting NEW0 for
2855 any occurrence of OLD0 as an operand of a comparison and likewise for
2856 NEW1 and OLD1. */
2858 static tree
2859 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2860 tree old1, tree new1)
2862 tree type = TREE_TYPE (arg);
2863 enum tree_code code = TREE_CODE (arg);
2864 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2866 /* We can handle some of the tcc_expression cases here. */
2867 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2868 tclass = tcc_unary;
2869 else if (tclass == tcc_expression
2870 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2871 tclass = tcc_binary;
2873 switch (tclass)
2875 case tcc_unary:
2876 return fold_build1_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1));
2880 case tcc_binary:
2881 return fold_build2_loc (loc, code, type,
2882 eval_subst (loc, TREE_OPERAND (arg, 0),
2883 old0, new0, old1, new1),
2884 eval_subst (loc, TREE_OPERAND (arg, 1),
2885 old0, new0, old1, new1));
2887 case tcc_expression:
2888 switch (code)
2890 case SAVE_EXPR:
2891 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2892 old1, new1);
2894 case COMPOUND_EXPR:
2895 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2896 old1, new1);
2898 case COND_EXPR:
2899 return fold_build3_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 2),
2905 old0, new0, old1, new1));
2906 default:
2907 break;
2909 /* Fall through - ??? */
2911 case tcc_comparison:
2913 tree arg0 = TREE_OPERAND (arg, 0);
2914 tree arg1 = TREE_OPERAND (arg, 1);
2916 /* We need to check both for exact equality and tree equality. The
2917 former will be true if the operand has a side-effect. In that
2918 case, we know the operand occurred exactly once. */
2920 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2921 arg0 = new0;
2922 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2923 arg0 = new1;
2925 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2926 arg1 = new0;
2927 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2928 arg1 = new1;
2930 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 default:
2934 return arg;
2938 /* Return a tree for the case when the result of an expression is RESULT
2939 converted to TYPE and OMITTED was previously an operand of the expression
2940 but is now not needed (e.g., we folded OMITTED * 0).
2942 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2943 the conversion of RESULT to TYPE. */
2945 tree
2946 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2948 tree t = fold_convert_loc (loc, type, result);
2950 /* If the resulting operand is an empty statement, just return the omitted
2951 statement cast to void. */
2952 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2953 return build1_loc (loc, NOP_EXPR, void_type_node,
2954 fold_ignored_result (omitted));
2956 if (TREE_SIDE_EFFECTS (omitted))
2957 return build2_loc (loc, COMPOUND_EXPR, type,
2958 fold_ignored_result (omitted), t);
2960 return non_lvalue_loc (loc, t);
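/* A minimal sketch (illustrative only): when folding "foo () * 0",
   the call must still be evaluated for its side effects, so the
   result below is the COMPOUND_EXPR (foo (), 0); with a side-effect
   free OMITTED operand it is just the constant zero.  */

static tree ATTRIBUTE_UNUSED
example_omit_multiplication (location_t loc, tree type, tree call)
{
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), call);
}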
2963 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2965 static tree
2966 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2967 tree omitted)
2969 tree t = fold_convert_loc (loc, type, result);
2971 /* If the resulting operand is an empty statement, just return the omitted
2972 statement cast to void. */
2973 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2974 return build1_loc (loc, NOP_EXPR, void_type_node,
2975 fold_ignored_result (omitted));
2977 if (TREE_SIDE_EFFECTS (omitted))
2978 return build2_loc (loc, COMPOUND_EXPR, type,
2979 fold_ignored_result (omitted), t);
2981 return pedantic_non_lvalue_loc (loc, t);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2986 of the expression but are now not needed.
2988 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2989 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2990 evaluated before OMITTED2. Otherwise, if neither has side effects,
2991 just do the conversion of RESULT to TYPE. */
2993 tree
2994 omit_two_operands_loc (location_t loc, tree type, tree result,
2995 tree omitted1, tree omitted2)
2997 tree t = fold_convert_loc (loc, type, result);
2999 if (TREE_SIDE_EFFECTS (omitted2))
3000 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3001 if (TREE_SIDE_EFFECTS (omitted1))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3004 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3008 /* Return a simplified tree node for the truth-negation of ARG. This
3009 never alters ARG itself. We assume that ARG is an operation that
3010 returns a truth value (0 or 1).
3012 FIXME: one would think we would fold the result, but it causes
3013 problems with the dominator optimizer. */
3015 tree
3016 fold_truth_not_expr (location_t loc, tree arg)
3018 tree type = TREE_TYPE (arg);
3019 enum tree_code code = TREE_CODE (arg);
3020 location_t loc1, loc2;
3022 /* If this is a comparison, we can simply invert it, except for
3023 floating-point non-equality comparisons, in which case we just
3024 enclose a TRUTH_NOT_EXPR around what we have. */
3026 if (TREE_CODE_CLASS (code) == tcc_comparison)
3028 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3029 if (FLOAT_TYPE_P (op_type)
3030 && flag_trapping_math
3031 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3032 && code != NE_EXPR && code != EQ_EXPR)
3033 return NULL_TREE;
3035 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3036 if (code == ERROR_MARK)
3037 return NULL_TREE;
3039 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3040 TREE_OPERAND (arg, 1));
3043 switch (code)
3045 case INTEGER_CST:
3046 return constant_boolean_node (integer_zerop (arg), type);
3048 case TRUTH_AND_EXPR:
3049 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3050 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3051 return build2_loc (loc, TRUTH_OR_EXPR, type,
3052 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3053 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3055 case TRUTH_OR_EXPR:
3056 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3057 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3058 return build2_loc (loc, TRUTH_AND_EXPR, type,
3059 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3060 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062 case TRUTH_XOR_EXPR:
3063 /* Here we can invert either operand. We invert the first operand
3064 unless the second operand is a TRUTH_NOT_EXPR in which case our
3065 result is the XOR of the first operand with the inside of the
3066 negation of the second operand. */
3068 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3069 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3070 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3071 else
3072 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3073 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3074 TREE_OPERAND (arg, 1));
3076 case TRUTH_ANDIF_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083 case TRUTH_ORIF_EXPR:
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_NOT_EXPR:
3091 return TREE_OPERAND (arg, 0);
3093 case COND_EXPR:
3095 tree arg1 = TREE_OPERAND (arg, 1);
3096 tree arg2 = TREE_OPERAND (arg, 2);
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3101 /* A COND_EXPR may have a throw as one operand, which
3102 then has void type. Just leave void operands
3103 as they are. */
3104 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3105 VOID_TYPE_P (TREE_TYPE (arg1))
3106 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3107 VOID_TYPE_P (TREE_TYPE (arg2))
3108 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 case COMPOUND_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 return build2_loc (loc, COMPOUND_EXPR, type,
3114 TREE_OPERAND (arg, 0),
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3117 case NON_LVALUE_EXPR:
3118 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3121 CASE_CONVERT:
3122 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3123 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3125 /* ... fall through ... */
3127 case FLOAT_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 return build1_loc (loc, TREE_CODE (arg), type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3132 case BIT_AND_EXPR:
3133 if (!integer_onep (TREE_OPERAND (arg, 1)))
3134 return NULL_TREE;
3135 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3137 case SAVE_EXPR:
3138 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3140 case CLEANUP_POINT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 default:
3146 return NULL_TREE;
3150 /* Return a simplified tree node for the truth-negation of ARG. This
3151 never alters ARG itself. We assume that ARG is an operation that
3152 returns a truth value (0 or 1).
3154 FIXME: one would think we would fold the result, but it causes
3155 problems with the dominator optimizer. */
3157 tree
3158 invert_truthvalue_loc (location_t loc, tree arg)
3160 tree tem;
3162 if (TREE_CODE (arg) == ERROR_MARK)
3163 return arg;
3165 tem = fold_truth_not_expr (loc, arg);
3166 if (!tem)
3167 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3169 return tem;
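/* A minimal sketch (illustrative only): inverting A && B takes the
   TRUTH_ANDIF_EXPR case of fold_truth_not_expr above and yields
   !A || !B by De Morgan, instead of wrapping the whole conjunction
   in a TRUTH_NOT_EXPR.  */

static tree ATTRIBUTE_UNUSED
example_invert_conjunction (location_t loc, tree a, tree b)
{
  tree conj = build2_loc (loc, TRUTH_ANDIF_EXPR, boolean_type_node, a, b);
  return invert_truthvalue_loc (loc, conj);
}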
3172 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3173 operands are another bit-wise operation with a common input. If so,
3174 distribute the bit operations to save an operation and possibly two if
3175 constants are involved. For example, convert
3176 (A | B) & (A | C) into A | (B & C)
3177 Further simplification will occur if B and C are constants.
3179 If this optimization cannot be done, 0 will be returned. */
3181 static tree
3182 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3183 tree arg0, tree arg1)
3185 tree common;
3186 tree left, right;
3188 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3189 || TREE_CODE (arg0) == code
3190 || (TREE_CODE (arg0) != BIT_AND_EXPR
3191 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3192 return 0;
3194 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3196 common = TREE_OPERAND (arg0, 0);
3197 left = TREE_OPERAND (arg0, 1);
3198 right = TREE_OPERAND (arg1, 1);
3200 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3202 common = TREE_OPERAND (arg0, 0);
3203 left = TREE_OPERAND (arg0, 1);
3204 right = TREE_OPERAND (arg1, 0);
3206 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3208 common = TREE_OPERAND (arg0, 1);
3209 left = TREE_OPERAND (arg0, 0);
3210 right = TREE_OPERAND (arg1, 1);
3212 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3214 common = TREE_OPERAND (arg0, 1);
3215 left = TREE_OPERAND (arg0, 0);
3216 right = TREE_OPERAND (arg1, 0);
3218 else
3219 return 0;
3221 common = fold_convert_loc (loc, type, common);
3222 left = fold_convert_loc (loc, type, left);
3223 right = fold_convert_loc (loc, type, right);
3224 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3225 fold_build2_loc (loc, code, type, left, right));
3228 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3229 with code CODE. This optimization is unsafe. */
3230 static tree
3231 distribute_real_division (location_t loc, enum tree_code code, tree type,
3232 tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3238 if (mul0 == mul1
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2_loc (loc, code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3256 if (!mul0)
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3258 if (!mul1)
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2_loc (loc, MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
3266 return NULL_TREE;
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3272 static tree
3273 make_bit_field_ref (location_t loc, tree inner, tree type,
3274 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3276 tree result, bftype;
3278 if (bitpos == 0)
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert_loc (loc, type, inner);
3288 bftype = type;
3289 if (TYPE_PRECISION (bftype) != bitsize
3290 || TYPE_UNSIGNED (bftype) == !unsignedp)
3291 bftype = build_nonstandard_integer_type (bitsize, 0);
3293 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3294 size_int (bitsize), bitsize_int (bitpos));
3296 if (bftype != type)
3297 result = fold_convert_loc (loc, type, result);
3299 return result;
3302 /* Optimize a bit-field compare.
3304 There are two cases: First is a compare against a constant and the
3305 second is a comparison of two items where the fields are at the same
3306 bit position relative to the start of a chunk (byte, halfword, word)
3307 large enough to contain it. In these cases we can avoid the shift
3308 implicit in bitfield extractions.
3310 For constants, we emit a compare of the shifted constant with the
3311 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3312 compared. For two fields at the same position, we do the ANDs with the
3313 similar mask and compare the result of the ANDs.
3315 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3316 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3317 are the left and right operands of the comparison, respectively.
3319 If the optimization described above can be done, we return the resulting
3320 tree. Otherwise we return zero. */
3322 static tree
3323 optimize_bit_field_compare (location_t loc, enum tree_code code,
3324 tree compare_type, tree lhs, tree rhs)
3326 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3327 tree type = TREE_TYPE (lhs);
3328 tree signed_type, unsigned_type;
3329 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3330 enum machine_mode lmode, rmode, nmode;
3331 int lunsignedp, runsignedp;
3332 int lvolatilep = 0, rvolatilep = 0;
3333 tree linner, rinner = NULL_TREE;
3334 tree mask;
3335 tree offset;
3337 /* In the strict volatile bitfields case, doing code changes here may prevent
3338 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3339 if (flag_strict_volatile_bitfields > 0)
3340 return 0;
3342 /* Get all the information about the extractions being done. If the bit size
3343 is the same as the size of the underlying object, we aren't doing an
3344 extraction at all and so can do nothing. We also don't want to
3345 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3346 then will no longer be able to replace it. */
3347 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3348 &lunsignedp, &lvolatilep, false);
3349 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3350 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3351 return 0;
3353 if (!const_p)
3355 /* If this is not a constant, we can only do something if bit positions,
3356 sizes, and signedness are the same. */
3357 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3358 &runsignedp, &rvolatilep, false);
3360 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3361 || lunsignedp != runsignedp || offset != 0
3362 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3363 return 0;
3366 /* See if we can find a mode to refer to this field. We should be able to,
3367 but fail if we can't. */
3368 if (lvolatilep
3369 && GET_MODE_BITSIZE (lmode) > 0
3370 && flag_strict_volatile_bitfields > 0)
3371 nmode = lmode;
3372 else
3373 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3374 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3375 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3376 TYPE_ALIGN (TREE_TYPE (rinner))),
3377 word_mode, lvolatilep || rvolatilep);
3378 if (nmode == VOIDmode)
3379 return 0;
3381 /* Set signed and unsigned types of the precision of this mode for the
3382 shifts below. */
3383 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3384 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3386 /* Compute the bit position and size for the new reference and our offset
3387 within it. If the new reference is the same size as the original, we
3388 won't optimize anything, so return zero. */
3389 nbitsize = GET_MODE_BITSIZE (nmode);
3390 nbitpos = lbitpos & ~ (nbitsize - 1);
3391 lbitpos -= nbitpos;
3392 if (nbitsize == lbitsize)
3393 return 0;
3395 if (BYTES_BIG_ENDIAN)
3396 lbitpos = nbitsize - lbitsize - lbitpos;
3398 /* Make the mask to be used against the extracted field. */
3399 mask = build_int_cst_type (unsigned_type, -1);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos));
3404 if (! const_p)
3405 /* If not comparing with a constant, just rework the comparison
3406 and return. */
3407 return fold_build2_loc (loc, code, compare_type,
3408 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (loc, linner,
3410 unsigned_type,
3411 nbitsize, nbitpos,
3412 1),
3413 mask),
3414 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (loc, rinner,
3416 unsigned_type,
3417 nbitsize, nbitpos,
3418 1),
3419 mask));
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3428 the sign bit. */
3430 if (lunsignedp)
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert_loc (loc,
3434 unsigned_type, rhs),
3435 size_int (lbitsize))))
3437 warning (0, "comparison is always %d due to width of bit-field",
3438 code == NE_EXPR);
3439 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 else
3444 tree tem = const_binop (RSHIFT_EXPR,
3445 fold_convert_loc (loc, signed_type, rhs),
3446 size_int (lbitsize - 1));
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3450 code == NE_EXPR);
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3466 if (lvolatilep)
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert_loc (loc, unsigned_type, rhs),
3475 size_int (lbitpos)),
3476 mask);
3478 lhs = build2_loc (loc, code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3480 return lhs;
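/* For instance, given "struct S { unsigned f : 3; } s;", the test
   "s.f == 5" is rewritten along the lines of

     (BIT_FIELD_REF <s, nbitsize, nbitpos> & mask) == ((5 << lbitpos) & mask)

   so the field is compared in place rather than being extracted and
   shifted first (illustrative; the exact mask and bit positions
   depend on endianness and the mode chosen by get_best_mode).  */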
3483 /* Subroutine for fold_truth_andor_1: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3506 static tree
3507 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3513 tree and_mask = 0;
3514 tree mask, inner, offset;
3515 tree unsigned_type;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 return 0;
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (CONVERT_EXPR_P (exp)
3528 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3529 outer_type = TREE_TYPE (exp);
3530 STRIP_NOPS (exp);
3532 if (TREE_CODE (exp) == BIT_AND_EXPR)
3534 and_mask = TREE_OPERAND (exp, 1);
3535 exp = TREE_OPERAND (exp, 0);
3536 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3537 if (TREE_CODE (and_mask) != INTEGER_CST)
3538 return 0;
3541 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3542 punsignedp, pvolatilep, false);
3543 if ((inner == exp && and_mask == 0)
3544 || *pbitsize < 0 || offset != 0
3545 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3546 return 0;
3548 /* If the number of bits in the reference is the same as the bitsize of
3549 the outer type, then the outer type gives the signedness. Otherwise
3550 (in case of a small bitfield) the signedness is unchanged. */
3551 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3552 *punsignedp = TYPE_UNSIGNED (outer_type);
3554 /* Compute the mask to access the bitfield. */
3555 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3556 precision = TYPE_PRECISION (unsigned_type);
3558 mask = build_int_cst_type (unsigned_type, -1);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3564 if (and_mask != 0)
3565 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3566 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3568 *pmask = mask;
3569 *pand_mask = and_mask;
3570 return inner;
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3574 bit positions. */
3576 static int
3577 all_ones_mask_p (const_tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3581 tree tmask;
3583 tmask = build_int_cst_type (signed_type_for (type), -1);
3585 return
3586 tree_int_cst_equal (mask,
3587 const_binop (RSHIFT_EXPR,
3588 const_binop (LSHIFT_EXPR, tmask,
3589 size_int (precision - size)),
3590 size_int (precision - size)));
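/* For example, in a type of precision 32, all_ones_mask_p (mask, 4)
   is nonzero exactly for MASK == 0xf: shifting an all-ones value
   left and then right by 32 - 4 bits clears everything but the low
   four bits.  */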
3593 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3594 represents the sign bit of EXP's type. If EXP represents a sign
3595 or zero extension, also test VAL against the unextended type.
3596 The return value is the (sub)expression whose sign bit is VAL,
3597 or NULL_TREE otherwise. */
3599 static tree
3600 sign_bit_p (tree exp, const_tree val)
3602 unsigned HOST_WIDE_INT mask_lo, lo;
3603 HOST_WIDE_INT mask_hi, hi;
3604 int width;
3605 tree t;
3607 /* Tree EXP must have an integral type. */
3608 t = TREE_TYPE (exp);
3609 if (! INTEGRAL_TYPE_P (t))
3610 return NULL_TREE;
3612 /* Tree VAL must be an integer constant. */
3613 if (TREE_CODE (val) != INTEGER_CST
3614 || TREE_OVERFLOW (val))
3615 return NULL_TREE;
3617 width = TYPE_PRECISION (t);
3618 if (width > HOST_BITS_PER_WIDE_INT)
3620 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3621 lo = 0;
3623 mask_hi = ((unsigned HOST_WIDE_INT) -1
3624 >> (HOST_BITS_PER_DOUBLE_INT - width));
3625 mask_lo = -1;
3627 else
3629 hi = 0;
3630 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3632 mask_hi = 0;
3633 mask_lo = ((unsigned HOST_WIDE_INT) -1
3634 >> (HOST_BITS_PER_WIDE_INT - width));
3637 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3638 treat VAL as if it were unsigned. */
3639 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3640 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3641 return exp;
3643 /* Handle extension from a narrower type. */
3644 if (TREE_CODE (exp) == NOP_EXPR
3645 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3646 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3648 return NULL_TREE;
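/* For example, in an 8-bit signed type the sign bit constant is 0x80,
   so sign_bit_p (exp, val) returns EXP for that VAL; if EXP is a
   NOP_EXPR extending a narrower operand, the NOP_EXPR case above
   retries against the unextended type.  */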
3651 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3652 to be evaluated unconditionally. */
3654 static int
3655 simple_operand_p (const_tree exp)
3657 /* Strip any conversions that don't change the machine mode. */
3658 STRIP_NOPS (exp);
3660 return (CONSTANT_CLASS_P (exp)
3661 || TREE_CODE (exp) == SSA_NAME
3662 || (DECL_P (exp)
3663 && ! TREE_ADDRESSABLE (exp)
3664 && ! TREE_THIS_VOLATILE (exp)
3665 && ! DECL_NONLOCAL (exp)
3666 /* Don't regard global variables as simple. They may be
3667 allocated in ways unknown to the compiler (shared memory,
3668 #pragma weak, etc). */
3669 && ! TREE_PUBLIC (exp)
3670 && ! DECL_EXTERNAL (exp)
3671 /* Loading a static variable is unduly expensive, but global
3672 registers aren't expensive. */
3673 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3677 to be evaluated unconditionally.
3678 In addition to simple_operand_p, we assume that comparisons, conversions,
3679 and logic-not operations are simple, if their operands are simple, too. */
3681 static bool
3682 simple_operand_p_2 (tree exp)
3684 enum tree_code code;
3686 if (TREE_SIDE_EFFECTS (exp)
3687 || tree_could_trap_p (exp))
3688 return false;
3690 while (CONVERT_EXPR_P (exp))
3691 exp = TREE_OPERAND (exp, 0);
3693 code = TREE_CODE (exp);
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3696 return (simple_operand_p (TREE_OPERAND (exp, 0))
3697 && simple_operand_p (TREE_OPERAND (exp, 1)));
3699 if (code == TRUTH_NOT_EXPR)
3700 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3702 return simple_operand_p (exp);
3706 /* The following functions are subroutines to fold_range_test and allow it to
3707 try to change a logical combination of comparisons into a range test.
3709 For example, both
3710 X == 2 || X == 3 || X == 4 || X == 5
3711 and
3712 X >= 2 && X <= 5
3713 are converted to
3714 (unsigned) (X - 2) <= 3
3716 We describe each set of comparisons as being either inside or outside
3717 a range, using a variable named like IN_P, and then describe the
3718 range with a lower and upper bound. If one of the bounds is omitted,
3719 it represents either the highest or lowest value of the type.
3721 In the comments below, we represent a range by two numbers in brackets
3722 preceded by a "+" to designate being inside that range, or a "-" to
3723 designate being outside that range, so the condition can be inverted by
3724 flipping the prefix. An omitted bound is represented by a "-". For
3725 example, "- [-, 10]" means being outside the range starting at the lowest
3726 possible value and ending at 10, in other words, being greater than 10.
3727 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3728 always false.
3730 We set up things so that the missing bounds are handled in a consistent
3731 manner so neither a missing bound nor "true" and "false" need to be
3732 handled using a special case. */
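/* Illustrative aside (not from the original source): the transformation
   described above collapses a chain of comparisons into one unsigned
   range test.  The equivalence for the example given can be checked
   exhaustively in plain C:  */

#include <assert.h>

static int
chain_form (int x)   /* X == 2 || X == 3 || X == 4 || X == 5 */
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_form (int x)   /* (unsigned) (X - 2) <= 3 */
{
  return (unsigned) (x - 2) <= 3;
}

static void
range_encoding_demo (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    assert (chain_form (x) == range_form (x));
}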
3734 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3735 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3736 and UPPER1_P are nonzero if the respective argument is an upper bound
3737 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3738 must be specified for a comparison. ARG1 will be converted to ARG0's
3739 type if both are specified. */
3741 static tree
3742 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3743 tree arg1, int upper1_p)
3745 tree tem;
3746 int result;
3747 int sgn0, sgn1;
3749 /* If neither arg represents infinity, do the normal operation.
3750 Else, if not a comparison, return infinity. Else handle the special
3751 comparison rules. Note that most of the cases below won't occur, but
3752 are handled for consistency. */
3754 if (arg0 != 0 && arg1 != 0)
3756 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3757 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3758 STRIP_NOPS (tem);
3759 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 if (TREE_CODE_CLASS (code) != tcc_comparison)
3763 return 0;
3765 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3766 for neither. In real mathematics, open-ended ranges cannot be assumed
3767 to compare equal. But this is computer arithmetic, where numbers are
3768 finite, so we may treat any missing bound as a value Z, Z being greater
3769 than any representable number. This permits
3770 us to treat unbounded ranges as equal. */
3771 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3772 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3773 switch (code)
3775 case EQ_EXPR:
3776 result = sgn0 == sgn1;
3777 break;
3778 case NE_EXPR:
3779 result = sgn0 != sgn1;
3780 break;
3781 case LT_EXPR:
3782 result = sgn0 < sgn1;
3783 break;
3784 case LE_EXPR:
3785 result = sgn0 <= sgn1;
3786 break;
3787 case GT_EXPR:
3788 result = sgn0 > sgn1;
3789 break;
3790 case GE_EXPR:
3791 result = sgn0 >= sgn1;
3792 break;
3793 default:
3794 gcc_unreachable ();
3797 return constant_boolean_node (result, type);
3800 /* Helper routine for make_range. Perform one step for it, return
3801 new expression if the loop should continue or NULL_TREE if it should
3802 stop. */
3804 tree
3805 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3806 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3807 bool *strict_overflow_p)
3809 tree arg0_type = TREE_TYPE (arg0);
3810 tree n_low, n_high, low = *p_low, high = *p_high;
3811 int in_p = *p_in_p, n_in_p;
3813 switch (code)
3815 case TRUTH_NOT_EXPR:
3816 *p_in_p = ! in_p;
3817 return arg0;
3819 case EQ_EXPR: case NE_EXPR:
3820 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3821 /* We can only do something if the range is testing for zero
3822 and if the second operand is an integer constant. Note that
3823 saying something is "in" the range we make is done by
3824 complementing IN_P, since IN_P is set by the initial case of
3825 being not equal to zero; "out" means leaving it alone. */
3826 if (low == NULL_TREE || high == NULL_TREE
3827 || ! integer_zerop (low) || ! integer_zerop (high)
3828 || TREE_CODE (arg1) != INTEGER_CST)
3829 return NULL_TREE;
3831 switch (code)
3833 case NE_EXPR: /* - [c, c] */
3834 low = high = arg1;
3835 break;
3836 case EQ_EXPR: /* + [c, c] */
3837 in_p = ! in_p, low = high = arg1;
3838 break;
3839 case GT_EXPR: /* - [-, c] */
3840 low = 0, high = arg1;
3841 break;
3842 case GE_EXPR: /* + [c, -] */
3843 in_p = ! in_p, low = arg1, high = 0;
3844 break;
3845 case LT_EXPR: /* - [c, -] */
3846 low = arg1, high = 0;
3847 break;
3848 case LE_EXPR: /* + [-, c] */
3849 in_p = ! in_p, low = 0, high = arg1;
3850 break;
3851 default:
3852 gcc_unreachable ();
3855 /* If this is an unsigned comparison, we also know that EXP is
3856 greater than or equal to zero. We base the range tests we make
3857 on that fact, so we record it here so we can parse existing
3858 range tests. We test arg0_type since often the return type
3859 of, e.g. EQ_EXPR, is boolean. */
3860 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3862 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3863 in_p, low, high, 1,
3864 build_int_cst (arg0_type, 0),
3865 NULL_TREE))
3866 return NULL_TREE;
3868 in_p = n_in_p, low = n_low, high = n_high;
3870 /* If the high bound is missing, but we have a nonzero low
3871 bound, reverse the range so it goes from zero to the low bound
3872 minus 1. */
3873 if (high == 0 && low && ! integer_zerop (low))
3875 in_p = ! in_p;
3876 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3877 integer_one_node, 0);
3878 low = build_int_cst (arg0_type, 0);
3882 *p_low = low;
3883 *p_high = high;
3884 *p_in_p = in_p;
3885 return arg0;
3887 case NEGATE_EXPR:
3888 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3889 low and high are non-NULL, then normalize will DTRT. */
3890 if (!TYPE_UNSIGNED (arg0_type)
3891 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3893 if (low == NULL_TREE)
3894 low = TYPE_MIN_VALUE (arg0_type);
3895 if (high == NULL_TREE)
3896 high = TYPE_MAX_VALUE (arg0_type);
3899 /* (-x) IN [a,b] -> x in [-b, -a] */
3900 n_low = range_binop (MINUS_EXPR, exp_type,
3901 build_int_cst (exp_type, 0),
3902 0, high, 1);
3903 n_high = range_binop (MINUS_EXPR, exp_type,
3904 build_int_cst (exp_type, 0),
3905 0, low, 0);
3906 if (n_high != 0 && TREE_OVERFLOW (n_high))
3907 return NULL_TREE;
3908 goto normalize;
3910 case BIT_NOT_EXPR:
3911 /* ~ X -> -X - 1 */
3912 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3913 build_int_cst (exp_type, 1));
3915 case PLUS_EXPR:
3916 case MINUS_EXPR:
3917 if (TREE_CODE (arg1) != INTEGER_CST)
3918 return NULL_TREE;
3920 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3921 move a constant to the other side. */
3922 if (!TYPE_UNSIGNED (arg0_type)
3923 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3924 return NULL_TREE;
3926 /* If EXP is signed, any overflow in the computation is undefined,
3927 so we don't worry about it so long as our computations on
3928 the bounds don't overflow. For unsigned, overflow is defined
3929 and this is exactly the right thing. */
3930 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3931 arg0_type, low, 0, arg1, 0);
3932 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3933 arg0_type, high, 1, arg1, 0);
3934 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3935 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3936 return NULL_TREE;
3938 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3939 *strict_overflow_p = true;
3941 normalize:
3942 /* Check for an unsigned range which has wrapped around the maximum
3943 value thus making n_high < n_low, and normalize it. */
3944 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3946 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3947 integer_one_node, 0);
3948 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3949 integer_one_node, 0);
3951 /* If the range is of the form +/- [ x+1, x ], we won't
3952 be able to normalize it. But then, it represents the
3953 whole range or the empty set, so make it
3954 +/- [ -, - ]. */
3955 if (tree_int_cst_equal (n_low, low)
3956 && tree_int_cst_equal (n_high, high))
3957 low = high = 0;
3958 else
3959 in_p = ! in_p;
3961 else
3962 low = n_low, high = n_high;
3964 *p_low = low;
3965 *p_high = high;
3966 *p_in_p = in_p;
3967 return arg0;
3969 CASE_CONVERT:
3970 case NON_LVALUE_EXPR:
3971 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3972 return NULL_TREE;
3974 if (! INTEGRAL_TYPE_P (arg0_type)
3975 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3976 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3977 return NULL_TREE;
3979 n_low = low, n_high = high;
3981 if (n_low != 0)
3982 n_low = fold_convert_loc (loc, arg0_type, n_low);
3984 if (n_high != 0)
3985 n_high = fold_convert_loc (loc, arg0_type, n_high);
3987 /* If we're converting arg0 from an unsigned type to exp's
3988 signed type, we will be doing the comparison as unsigned.
3989 The tests above have already verified that LOW and HIGH
3990 are both positive.
3992 So we have to ensure that we will handle large unsigned
3993 values the same way that the current signed bounds treat
3994 negative values. */
3996 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3998 tree high_positive;
3999 tree equiv_type;
4000 /* For fixed-point modes, we need to pass the saturating flag
4001 as the 2nd parameter. */
4002 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4003 equiv_type
4004 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4005 TYPE_SATURATING (arg0_type));
4006 else
4007 equiv_type
4008 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4010 /* A range without an upper bound is, naturally, unbounded.
4011 Since convert would have cropped a very large value, use
4012 the max value for the destination type. */
4013 high_positive
4014 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4015 : TYPE_MAX_VALUE (arg0_type);
4017 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4018 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4019 fold_convert_loc (loc, arg0_type,
4020 high_positive),
4021 build_int_cst (arg0_type, 1));
4023 /* If the low bound is specified, "and" the range with the
4024 range for which the original unsigned value will be
4025 positive. */
4026 if (low != 0)
4028 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4029 1, fold_convert_loc (loc, arg0_type,
4030 integer_zero_node),
4031 high_positive))
4032 return NULL_TREE;
4034 in_p = (n_in_p == in_p);
4036 else
4038 /* Otherwise, "or" the range with the range of the input
4039 that will be interpreted as negative. */
4040 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4041 1, fold_convert_loc (loc, arg0_type,
4042 integer_zero_node),
4043 high_positive))
4044 return NULL_TREE;
4046 in_p = (in_p != n_in_p);
4050 *p_low = n_low;
4051 *p_high = n_high;
4052 *p_in_p = in_p;
4053 return arg0;
4055 default:
4056 return NULL_TREE;
4060 /* Given EXP, a logical expression, set the range it is testing into
4061 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4062 actually being tested. *PLOW and *PHIGH will be made of the same
4063 type as the returned expression. If EXP is not a comparison, we
4064 will most likely not be returning a useful value and range. Set
4065 *STRICT_OVERFLOW_P to true if the return value is only valid
4066 because signed overflow is undefined; otherwise, do not change
4067 *STRICT_OVERFLOW_P. */
4069 tree
4070 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4071 bool *strict_overflow_p)
4073 enum tree_code code;
4074 tree arg0, arg1 = NULL_TREE;
4075 tree exp_type, nexp;
4076 int in_p;
4077 tree low, high;
4078 location_t loc = EXPR_LOCATION (exp);
4080 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4081 and see if we can refine the range. Some of the cases below may not
4082 happen, but it doesn't seem worth worrying about this. We "continue"
4083 the outer loop when we've changed something; otherwise we "break"
4084 the switch, which will "break" the while. */
4086 in_p = 0;
4087 low = high = build_int_cst (TREE_TYPE (exp), 0);
4089 while (1)
4091 code = TREE_CODE (exp);
4092 exp_type = TREE_TYPE (exp);
4093 arg0 = NULL_TREE;
4095 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4097 if (TREE_OPERAND_LENGTH (exp) > 0)
4098 arg0 = TREE_OPERAND (exp, 0);
4099 if (TREE_CODE_CLASS (code) == tcc_binary
4100 || TREE_CODE_CLASS (code) == tcc_comparison
4101 || (TREE_CODE_CLASS (code) == tcc_expression
4102 && TREE_OPERAND_LENGTH (exp) > 1))
4103 arg1 = TREE_OPERAND (exp, 1);
4105 if (arg0 == NULL_TREE)
4106 break;
4108 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4109 &high, &in_p, strict_overflow_p);
4110 if (nexp == NULL_TREE)
4111 break;
4112 exp = nexp;
4115 /* If EXP is a constant, we can evaluate whether this is true or false. */
4116 if (TREE_CODE (exp) == INTEGER_CST)
4118 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4119 exp, 0, low, 0))
4120 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4121 exp, 1, high, 1)));
4122 low = high = 0;
4123 exp = 0;
4126 *pin_p = in_p, *plow = low, *phigh = high;
4127 return exp;
4130 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4131 type, TYPE, return an expression to test if EXP is in (or out of, depending
4132 on IN_P) the range. Return 0 if the test couldn't be created. */
4134 tree
4135 build_range_check (location_t loc, tree type, tree exp, int in_p,
4136 tree low, tree high)
4138 tree etype = TREE_TYPE (exp), value;
4140 #ifdef HAVE_canonicalize_funcptr_for_compare
4141 /* Disable this optimization for function pointer expressions
4142 on targets that require function pointer canonicalization. */
4143 if (HAVE_canonicalize_funcptr_for_compare
4144 && TREE_CODE (etype) == POINTER_TYPE
4145 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4146 return NULL_TREE;
4147 #endif
4149 if (! in_p)
4151 value = build_range_check (loc, type, exp, 1, low, high);
4152 if (value != 0)
4153 return invert_truthvalue_loc (loc, value);
4155 return 0;
4158 if (low == 0 && high == 0)
4159 return build_int_cst (type, 1);
4161 if (low == 0)
4162 return fold_build2_loc (loc, LE_EXPR, type, exp,
4163 fold_convert_loc (loc, etype, high));
4165 if (high == 0)
4166 return fold_build2_loc (loc, GE_EXPR, type, exp,
4167 fold_convert_loc (loc, etype, low));
4169 if (operand_equal_p (low, high, 0))
4170 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, low));
4173 if (integer_zerop (low))
4175 if (! TYPE_UNSIGNED (etype))
4177 etype = unsigned_type_for (etype);
4178 high = fold_convert_loc (loc, etype, high);
4179 exp = fold_convert_loc (loc, etype, exp);
4181 return build_range_check (loc, type, exp, 1, 0, high);
4184 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4185 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4187 unsigned HOST_WIDE_INT lo;
4188 HOST_WIDE_INT hi;
4189 int prec;
4191 prec = TYPE_PRECISION (etype);
4192 if (prec <= HOST_BITS_PER_WIDE_INT)
4194 hi = 0;
4195 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4197 else
4199 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4200 lo = (unsigned HOST_WIDE_INT) -1;
4203 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4205 if (TYPE_UNSIGNED (etype))
4207 tree signed_etype = signed_type_for (etype);
4208 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4209 etype
4210 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4211 else
4212 etype = signed_etype;
4213 exp = fold_convert_loc (loc, etype, exp);
4215 return fold_build2_loc (loc, GT_EXPR, type, exp,
4216 build_int_cst (etype, 0));
4220 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4221 This requires wrap-around arithmetic in the type of the expression.
4222 First make sure that arithmetic in this type is valid, then make sure
4223 that it wraps around. */
4224 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4225 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4226 TYPE_UNSIGNED (etype));
4228 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4230 tree utype, minv, maxv;
4232 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4233 for the type in question, as we rely on this here. */
4234 utype = unsigned_type_for (etype);
4235 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4236 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4237 integer_one_node, 1);
4238 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4240 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4241 minv, 1, maxv, 1)))
4242 etype = utype;
4243 else
4244 return 0;
4247 high = fold_convert_loc (loc, etype, high);
4248 low = fold_convert_loc (loc, etype, low);
4249 exp = fold_convert_loc (loc, etype, exp);
4251 value = const_binop (MINUS_EXPR, high, low);
4254 if (POINTER_TYPE_P (etype))
4256 if (value != 0 && !TREE_OVERFLOW (value))
4258 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4259 return build_range_check (loc, type,
4260 fold_build_pointer_plus_loc (loc, exp, low),
4261 1, build_int_cst (etype, 0), value);
4263 return 0;
4266 if (value != 0 && !TREE_OVERFLOW (value))
4267 return build_range_check (loc, type,
4268 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4269 1, build_int_cst (etype, 0), value);
4271 return 0;
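/* Illustrative aside (not from the original source): two of the rewrites
   performed above, checked exhaustively in plain C.  The first turns
   (c>=1 && c<=127) into a signed comparison against zero; the second
   turns a general two-sided test into a single unsigned comparison:  */

#include <assert.h>

static void
build_range_check_demo (void)
{
  int c;
  for (c = 0; c <= 255; c++)
    {
      /* (c >= 1 && c <= 127)  ==>  (signed char) c > 0.  */
      assert (((c >= 1 && c <= 127) ? 1 : 0)
              == (((signed char) c > 0) ? 1 : 0));

      /* (c >= 40 && c <= 90)  ==>  (unsigned) (c - 40) <= 90 - 40.  */
      assert (((c >= 40 && c <= 90) ? 1 : 0)
              == (((unsigned) (c - 40) <= 50) ? 1 : 0));
    }
}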
4274 /* Return the predecessor of VAL in its type, handling the infinite case. */
4276 static tree
4277 range_predecessor (tree val)
4279 tree type = TREE_TYPE (val);
4281 if (INTEGRAL_TYPE_P (type)
4282 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4283 return 0;
4284 else
4285 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4288 /* Return the successor of VAL in its type, handling the infinite case. */
4290 static tree
4291 range_successor (tree val)
4293 tree type = TREE_TYPE (val);
4295 if (INTEGRAL_TYPE_P (type)
4296 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4297 return 0;
4298 else
4299 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4302 /* Given two ranges, see if we can merge them into one. Return 1 if we
4303 can, 0 if we can't. Set the output range into the specified parameters. */
4305 bool
4306 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4307 tree high0, int in1_p, tree low1, tree high1)
4309 int no_overlap;
4310 int subset;
4311 int temp;
4312 tree tem;
4313 int in_p;
4314 tree low, high;
4315 int lowequal = ((low0 == 0 && low1 == 0)
4316 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4317 low0, 0, low1, 0)));
4318 int highequal = ((high0 == 0 && high1 == 0)
4319 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4320 high0, 1, high1, 1)));
4322 /* Make range 0 be the range that starts first, or ends last if they
4323 start at the same value. Swap them if that is not the case. */
4324 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4325 low0, 0, low1, 0))
4326 || (lowequal
4327 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4328 high1, 1, high0, 1))))
4330 temp = in0_p, in0_p = in1_p, in1_p = temp;
4331 tem = low0, low0 = low1, low1 = tem;
4332 tem = high0, high0 = high1, high1 = tem;
4335 /* Now flag two cases, whether the ranges are disjoint or whether the
4336 second range is totally subsumed in the first. Note that the tests
4337 below are simplified by the ones above. */
4338 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4339 high0, 1, low1, 0));
4340 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4341 high1, 1, high0, 1));
4343 /* We now have four cases, depending on whether we are including or
4344 excluding the two ranges. */
4345 if (in0_p && in1_p)
4347 /* If they don't overlap, the result is false. If the second range
4348 is a subset it is the result. Otherwise, the range is from the start
4349 of the second to the end of the first. */
4350 if (no_overlap)
4351 in_p = 0, low = high = 0;
4352 else if (subset)
4353 in_p = 1, low = low1, high = high1;
4354 else
4355 in_p = 1, low = low1, high = high0;
4358 else if (in0_p && ! in1_p)
4360 /* If they don't overlap, the result is the first range. If they are
4361 equal, the result is false. If the second range is a subset of the
4362 first, and the ranges begin at the same place, we go from just after
4363 the end of the second range to the end of the first. If the second
4364 range is not a subset of the first, or if it is a subset and both
4365 ranges end at the same place, the range starts at the start of the
4366 first range and ends just before the second range.
4367 Otherwise, we can't describe this as a single range. */
4368 if (no_overlap)
4369 in_p = 1, low = low0, high = high0;
4370 else if (lowequal && highequal)
4371 in_p = 0, low = high = 0;
4372 else if (subset && lowequal)
4374 low = range_successor (high1);
4375 high = high0;
4376 in_p = 1;
4377 if (low == 0)
4379 /* We are in the weird situation where high0 > high1 but
4380 high1 has no successor. Punt. */
4381 return 0;
4384 else if (! subset || highequal)
4386 low = low0;
4387 high = range_predecessor (low1);
4388 in_p = 1;
4389 if (high == 0)
4391 /* low0 < low1 but low1 has no predecessor. Punt. */
4392 return 0;
4395 else
4396 return 0;
4399 else if (! in0_p && in1_p)
4401 /* If they don't overlap, the result is the second range. If the second
4402 is a subset of the first, the result is false. Otherwise,
4403 the range starts just after the first range and ends at the
4404 end of the second. */
4405 if (no_overlap)
4406 in_p = 1, low = low1, high = high1;
4407 else if (subset || highequal)
4408 in_p = 0, low = high = 0;
4409 else
4411 low = range_successor (high0);
4412 high = high1;
4413 in_p = 1;
4414 if (low == 0)
4416 /* high1 > high0 but high0 has no successor. Punt. */
4417 return 0;
4422 else
4424 /* The case where we are excluding both ranges. Here the complex case
4425 is if they don't overlap. In that case, the only time we have a
4426 range is if they are adjacent. If the second is a subset of the
4427 first, the result is the first. Otherwise, the range to exclude
4428 starts at the beginning of the first range and ends at the end of the
4429 second. */
4430 if (no_overlap)
4432 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4433 range_successor (high0),
4434 1, low1, 0)))
4435 in_p = 0, low = low0, high = high1;
4436 else
4438 /* Canonicalize - [min, x] into - [-, x]. */
4439 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4440 switch (TREE_CODE (TREE_TYPE (low0)))
4442 case ENUMERAL_TYPE:
4443 if (TYPE_PRECISION (TREE_TYPE (low0))
4444 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4445 break;
4446 /* FALLTHROUGH */
4447 case INTEGER_TYPE:
4448 if (tree_int_cst_equal (low0,
4449 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4450 low0 = 0;
4451 break;
4452 case POINTER_TYPE:
4453 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4454 && integer_zerop (low0))
4455 low0 = 0;
4456 break;
4457 default:
4458 break;
4461 /* Canonicalize - [x, max] into - [x, -]. */
4462 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4463 switch (TREE_CODE (TREE_TYPE (high1)))
4465 case ENUMERAL_TYPE:
4466 if (TYPE_PRECISION (TREE_TYPE (high1))
4467 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4468 break;
4469 /* FALLTHROUGH */
4470 case INTEGER_TYPE:
4471 if (tree_int_cst_equal (high1,
4472 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4473 high1 = 0;
4474 break;
4475 case POINTER_TYPE:
4476 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4477 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4478 high1, 1,
4479 integer_one_node, 1)))
4480 high1 = 0;
4481 break;
4482 default:
4483 break;
4486 /* The ranges might also be adjacent between the maximum and
4487 minimum values of the given type. For
4488 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4489 return + [x + 1, y - 1]. */
4490 if (low0 == 0 && high1 == 0)
4492 low = range_successor (high0);
4493 high = range_predecessor (low1);
4494 if (low == 0 || high == 0)
4495 return 0;
4497 in_p = 1;
4499 else
4500 return 0;
4503 else if (subset)
4504 in_p = 0, low = low0, high = high0;
4505 else
4506 in_p = 0, low = low0, high = high1;
4509 *pin_p = in_p, *plow = low, *phigh = high;
4510 return 1;
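/* Illustrative aside (not from the original source): merge_ranges
   combines two in/out range tests into one.  For the "both inclusive,
   overlapping, neither a subset" case documented above, +[2,7] merged
   with +[5,9] yields +[low1, high0] = +[5,7], which a brute-force
   check confirms:  */

#include <assert.h>

static void
merge_ranges_demo (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int both = (2 <= x && x <= 7) && (5 <= x && x <= 9);
      int merged = (5 <= x && x <= 7);  /* + [low1, high0] */
      assert (both == merged);
    }
}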
4514 /* Subroutine of fold, looking inside expressions of the form
4515 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4516 of the COND_EXPR. This function is being used also to optimize
4517 A op B ? C : A, by reversing the comparison first.
4519 Return a folded expression whose code is not a COND_EXPR
4520 anymore, or NULL_TREE if no folding opportunity is found. */
4522 static tree
4523 fold_cond_expr_with_comparison (location_t loc, tree type,
4524 tree arg0, tree arg1, tree arg2)
4526 enum tree_code comp_code = TREE_CODE (arg0);
4527 tree arg00 = TREE_OPERAND (arg0, 0);
4528 tree arg01 = TREE_OPERAND (arg0, 1);
4529 tree arg1_type = TREE_TYPE (arg1);
4530 tree tem;
4532 STRIP_NOPS (arg1);
4533 STRIP_NOPS (arg2);
4535 /* If we have A op 0 ? A : -A, consider applying the following
4536 transformations:
4538 A == 0? A : -A same as -A
4539 A != 0? A : -A same as A
4540 A >= 0? A : -A same as abs (A)
4541 A > 0? A : -A same as abs (A)
4542 A <= 0? A : -A same as -abs (A)
4543 A < 0? A : -A same as -abs (A)
4545 None of these transformations work for modes with signed
4546 zeros. If A is +/-0, the first two transformations will
4547 change the sign of the result (from +0 to -0, or vice
4548 versa). The last four will fix the sign of the result,
4549 even though the original expressions could be positive or
4550 negative, depending on the sign of A.
4552 Note that all these transformations are correct if A is
4553 NaN, since the two alternatives (A and -A) are also NaNs. */
4554 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4555 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4556 ? real_zerop (arg01)
4557 : integer_zerop (arg01))
4558 && ((TREE_CODE (arg2) == NEGATE_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4560 /* In the case that A is of the form X-Y, '-A' (arg2) may
4561 have already been folded to Y-X, check for that. */
4562 || (TREE_CODE (arg1) == MINUS_EXPR
4563 && TREE_CODE (arg2) == MINUS_EXPR
4564 && operand_equal_p (TREE_OPERAND (arg1, 0),
4565 TREE_OPERAND (arg2, 1), 0)
4566 && operand_equal_p (TREE_OPERAND (arg1, 1),
4567 TREE_OPERAND (arg2, 0), 0))))
4568 switch (comp_code)
4570 case EQ_EXPR:
4571 case UNEQ_EXPR:
4572 tem = fold_convert_loc (loc, arg1_type, arg1);
4573 return pedantic_non_lvalue_loc (loc,
4574 fold_convert_loc (loc, type,
4575 negate_expr (tem)));
4576 case NE_EXPR:
4577 case LTGT_EXPR:
4578 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4579 case UNGE_EXPR:
4580 case UNGT_EXPR:
4581 if (flag_trapping_math)
4582 break;
4583 /* Fall through. */
4584 case GE_EXPR:
4585 case GT_EXPR:
4586 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4587 arg1 = fold_convert_loc (loc, signed_type_for
4588 (TREE_TYPE (arg1)), arg1);
4589 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4590 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4591 case UNLE_EXPR:
4592 case UNLT_EXPR:
4593 if (flag_trapping_math)
4594 break;
4595 case LE_EXPR:
4596 case LT_EXPR:
4597 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4598 arg1 = fold_convert_loc (loc, signed_type_for
4599 (TREE_TYPE (arg1)), arg1);
4600 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4601 return negate_expr (fold_convert_loc (loc, type, tem));
4602 default:
4603 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4604 break;
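/* Illustrative aside (not from the original source): the signed-zero
   caveat described above can be reproduced directly, assuming
   IEEE-style signed zeros.  With A = -0.0, "A != 0 ? A : -A" evaluates
   to +0.0, so folding it to plain A (which is -0.0) would change the
   sign of the result -- hence the !HONOR_SIGNED_ZEROS guard:  */

#include <assert.h>
#include <math.h>

static void
signed_zero_demo (void)
{
  double a = -0.0;
  double as_cond = a != 0 ? a : -a;   /* the original conditional */
  /* -0.0 != 0 is false, so the conditional yields -(-0.0) = +0.0,
     while A itself is -0.0.  */
  assert (signbit (as_cond) == 0);
  assert (signbit (a) != 0);
}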
4607 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4608 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4609 both transformations are correct when A is NaN: A != 0
4610 is then true, and A == 0 is false. */
4612 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4613 && integer_zerop (arg01) && integer_zerop (arg2))
4615 if (comp_code == NE_EXPR)
4616 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4617 else if (comp_code == EQ_EXPR)
4618 return build_int_cst (type, 0);
4621 /* Try some transformations of A op B ? A : B.
4623 A == B? A : B same as B
4624 A != B? A : B same as A
4625 A >= B? A : B same as max (A, B)
4626 A > B? A : B same as max (B, A)
4627 A <= B? A : B same as min (A, B)
4628 A < B? A : B same as min (B, A)
4630 As above, these transformations don't work in the presence
4631 of signed zeros. For example, if A and B are zeros of
4632 opposite sign, the first two transformations will change
4633 the sign of the result. In the last four, the original
4634 expressions give different results for (A=+0, B=-0) and
4635 (A=-0, B=+0), but the transformed expressions do not.
4637 The first two transformations are correct if either A or B
4638 is a NaN. In the first transformation, the condition will
4639 be false, and B will indeed be chosen. In the case of the
4640 second transformation, the condition A != B will be true,
4641 and A will be chosen.
4643 The conversions to max() and min() are not correct if B is
4644 a number and A is not. The conditions in the original
4645 expressions will be false, so all four give B. The min()
4646 and max() versions would give a NaN instead. */
4647 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4648 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4649 /* Avoid these transformations if the COND_EXPR may be used
4650 as an lvalue in the C++ front-end. PR c++/19199. */
4651 && (in_gimple_form
4652 || (strcmp (lang_hooks.name, "GNU C++") != 0
4653 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4654 || ! maybe_lvalue_p (arg1)
4655 || ! maybe_lvalue_p (arg2)))
4657 tree comp_op0 = arg00;
4658 tree comp_op1 = arg01;
4659 tree comp_type = TREE_TYPE (comp_op0);
4661 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4662 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4664 comp_type = type;
4665 comp_op0 = arg1;
4666 comp_op1 = arg2;
4669 switch (comp_code)
4671 case EQ_EXPR:
4672 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4673 case NE_EXPR:
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4675 case LE_EXPR:
4676 case LT_EXPR:
4677 case UNLE_EXPR:
4678 case UNLT_EXPR:
4679 /* In C++ a ?: expression can be an lvalue, so put the
4680 operand which will be used if they are equal first
4681 so that we can convert this back to the
4682 corresponding COND_EXPR. */
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4688 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case GE_EXPR:
4696 case GT_EXPR:
4697 case UNGE_EXPR:
4698 case UNGT_EXPR:
4699 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4701 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4702 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4703 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4704 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4705 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4706 comp_op1, comp_op0);
4707 return pedantic_non_lvalue_loc (loc,
4708 fold_convert_loc (loc, type, tem));
4710 break;
4711 case UNEQ_EXPR:
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg2));
4715 break;
4716 case LTGT_EXPR:
4717 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4718 return pedantic_non_lvalue_loc (loc,
4719 fold_convert_loc (loc, type, arg1));
4720 break;
4721 default:
4722 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4723 break;
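/* Illustrative aside (not from the original source): "A < B ? A : B"
   is min (A, B) for ordered values, but as the comment above notes,
   when A is a NaN the comparison is false and the conditional picks B,
   whereas a MIN_EXPR would be entitled to return the NaN -- hence the
   !HONOR_NANS guards.  A quick check (link with -lm):  */

#include <assert.h>
#include <math.h>

static double
cond_min (double a, double b)
{
  return a < b ? a : b;
}

static void
cond_min_demo (void)
{
  assert (cond_min (1.0, 2.0) == 1.0);
  assert (cond_min (2.0, 1.0) == 1.0);
  /* With A = NaN the conditional yields B, never NaN.  */
  assert (cond_min (nan (""), 5.0) == 5.0);
}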
4727 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4728 we might still be able to simplify this. For example,
4729 if C1 is one less or one more than C2, this might have started
4730 out as a MIN or MAX and been transformed by this function.
4731 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4733 if (INTEGRAL_TYPE_P (type)
4734 && TREE_CODE (arg01) == INTEGER_CST
4735 && TREE_CODE (arg2) == INTEGER_CST)
4736 switch (comp_code)
4738 case EQ_EXPR:
4739 if (TREE_CODE (arg1) == INTEGER_CST)
4740 break;
4741 /* We can replace A with C1 in this case. */
4742 arg1 = fold_convert_loc (loc, type, arg01);
4743 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4745 case LT_EXPR:
4746 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4747 MIN_EXPR, to preserve the signedness of the comparison. */
4748 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4749 OEP_ONLY_CONST)
4750 && operand_equal_p (arg01,
4751 const_binop (PLUS_EXPR, arg2,
4752 build_int_cst (type, 1)),
4753 OEP_ONLY_CONST))
4755 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4756 fold_convert_loc (loc, TREE_TYPE (arg00),
4757 arg2));
4758 return pedantic_non_lvalue_loc (loc,
4759 fold_convert_loc (loc, type, tem));
4761 break;
4763 case LE_EXPR:
4764 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4765 as above. */
4766 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4767 OEP_ONLY_CONST)
4768 && operand_equal_p (arg01,
4769 const_binop (MINUS_EXPR, arg2,
4770 build_int_cst (type, 1)),
4771 OEP_ONLY_CONST))
4773 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4774 fold_convert_loc (loc, TREE_TYPE (arg00),
4775 arg2));
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4779 break;
4781 case GT_EXPR:
4782 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4783 MAX_EXPR, to preserve the signedness of the comparison. */
4784 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (MINUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4798 case GE_EXPR:
4799 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4800 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4801 OEP_ONLY_CONST)
4802 && operand_equal_p (arg01,
4803 const_binop (PLUS_EXPR, arg2,
4804 build_int_cst (type, 1)),
4805 OEP_ONLY_CONST))
4807 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4808 fold_convert_loc (loc, TREE_TYPE (arg00),
4809 arg2));
4810 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4812 break;
4813 case NE_EXPR:
4814 break;
4815 default:
4816 gcc_unreachable ();
4819 return NULL_TREE;
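/* Illustrative aside (not from the original source): the last block
   above recognizes min/max written with constants one apart, e.g.
   "A < C1 ? A : C2" with C1 == C2 + 1 is min (A, C2).  Verified by
   brute force for C1 = 11, C2 = 10:  */

#include <assert.h>

static void
disguised_min_demo (void)
{
  int a;
  for (a = -20; a <= 20; a++)
    {
      int cond_form = a < 11 ? a : 10;   /* A op C1 ? A : C2 */
      int min_form = a < 10 ? a : 10;    /* min (A, 10) */
      assert (cond_form == min_form);
    }
}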
4824 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4825 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4826 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4827 false) >= 2)
4828 #endif
4830 /* EXP is some logical combination of boolean tests. See if we can
4831 merge it into some range test. Return the new tree if so. */
4833 static tree
4834 fold_range_test (location_t loc, enum tree_code code, tree type,
4835 tree op0, tree op1)
4837 int or_op = (code == TRUTH_ORIF_EXPR
4838 || code == TRUTH_OR_EXPR);
4839 int in0_p, in1_p, in_p;
4840 tree low0, low1, low, high0, high1, high;
4841 bool strict_overflow_p = false;
4842 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4843 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4844 tree tem;
4845 const char * const warnmsg = G_("assuming signed overflow does not occur "
4846 "when simplifying range test");
4848 /* If this is an OR operation, invert both sides; we will invert
4849 again at the end. */
4850 if (or_op)
4851 in0_p = ! in0_p, in1_p = ! in1_p;
4853 /* If both expressions are the same, if we can merge the ranges, and we
4854 can build the range test, return it or it inverted. If one of the
4855 ranges is always true or always false, consider it to be the same
4856 expression as the other. */
4857 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4858 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4859 in1_p, low1, high1)
4860 && 0 != (tem = (build_range_check (loc, type,
4861 lhs != 0 ? lhs
4862 : rhs != 0 ? rhs : integer_zero_node,
4863 in_p, low, high))))
4865 if (strict_overflow_p)
4866 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4867 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4870 /* On machines where the branch cost is expensive, if this is a
4871 short-circuited branch and the underlying object on both sides
4872 is the same, make a non-short-circuit operation. */
4873 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4874 && lhs != 0 && rhs != 0
4875 && (code == TRUTH_ANDIF_EXPR
4876 || code == TRUTH_ORIF_EXPR)
4877 && operand_equal_p (lhs, rhs, 0))
4879 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4880 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4881 which cases we can't do this. */
4882 if (simple_operand_p (lhs))
4883 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4884 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4885 type, op0, op1);
4887 else if (!lang_hooks.decls.global_bindings_p ()
4888 && !CONTAINS_PLACEHOLDER_P (lhs))
4890 tree common = save_expr (lhs);
4892 if (0 != (lhs = build_range_check (loc, type, common,
4893 or_op ? ! in0_p : in0_p,
4894 low0, high0))
4895 && (0 != (rhs = build_range_check (loc, type, common,
4896 or_op ? ! in1_p : in1_p,
4897 low1, high1))))
4899 if (strict_overflow_p)
4900 fold_overflow_warning (warnmsg,
4901 WARN_STRICT_OVERFLOW_COMPARISON);
4902 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4903 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4904 type, lhs, rhs);
4909 return 0;
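/* Illustrative aside (not from the original source): the
   non-short-circuit rewrite above relies on both operands being safe
   to evaluate unconditionally; TRUTH_ANDIF then becomes TRUTH_AND,
   which at the C level corresponds to replacing && by & on values
   already reduced to 0/1:  */

#include <assert.h>

static void
non_short_circuit_demo (void)
{
  int x;
  for (x = -5; x <= 15; x++)
    {
      int a = (x > 2), b = (x < 10);
      /* Safe because A and B are simple, side-effect-free 0/1
         values; the & form avoids a conditional branch.  */
      assert ((a && b) == (a & b));
    }
}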
4912 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4913 P-bit value. Arrange things so the extra bits will be set to zero if and
4914 only if C is sign-extended to its full width. If MASK is nonzero,
4915 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4917 static tree
4918 unextend (tree c, int p, int unsignedp, tree mask)
4920 tree type = TREE_TYPE (c);
4921 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4922 tree temp;
4924 if (p == modesize || unsignedp)
4925 return c;
4927 /* We work by getting just the sign bit into the low-order bit, then
4928 into the high-order bit, then sign-extend. We then XOR that value
4929 with C. */
4930 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4931 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4933 /* We must use a signed type in order to get an arithmetic right shift.
4934 However, we must also avoid introducing accidental overflows, so that
4935 a subsequent call to integer_zerop will work. Hence we must
4936 do the type conversion here. At this point, the constant is either
4937 zero or one, and the conversion to a signed type can never overflow.
4938 We could get an overflow if this conversion is done anywhere else. */
4939 if (TYPE_UNSIGNED (type))
4940 temp = fold_convert (signed_type_for (type), temp);
4942 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4943 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4944 if (mask != 0)
4945 temp = const_binop (BIT_AND_EXPR, temp,
4946 fold_convert (TREE_TYPE (c), mask));
4947 /* If necessary, convert the type back to match the type of C. */
4948 if (TYPE_UNSIGNED (type))
4949 temp = fold_convert (type, temp);
4951 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
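/* Illustrative aside (not from the original source): the shift sequence
   in unextend isolates the sign bit of a P-bit constant, moves it to
   the top of the mode, and arithmetic-right-shifts it back down to get
   either zero or a block of high one bits.  A 32-bit model of the same
   trick; like the code above, it relies on arithmetic right shift of
   signed values, which C leaves implementation-defined but GCC
   guarantees:  */

#include <stdint.h>
#include <assert.h>

static int32_t
sign_extend_p_bits (uint32_t c, int p)  /* C has zeros above bit P-1.  */
{
  uint32_t temp = (c >> (p - 1)) & 1;            /* sign bit -> bit 0 */
  int32_t high = (int32_t) (temp << 31) >> (31 - p); /* bits P..31 set */
  return (int32_t) (c ^ (uint32_t) high);       /* flip the extra bits */
}

static void
unextend_demo (void)
{
  assert (sign_extend_p_bits (0x80, 8) == -128);
  assert (sign_extend_p_bits (0x7f, 8) == 127);
  assert (sign_extend_p_bits (0xff, 8) == -1);
}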
4954 /* For an expression that has the form
4955 (A && B) || ~B
4956 or
4957 (A || B) && ~B,
4958 we can drop one of the inner expressions and simplify to
4959 A || ~B
4960 or
4961 A && ~B
4962 LOC is the location of the resulting expression. OP is the inner
4963 logical operation; the left-hand side in the examples above, while CMPOP
4964 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4965 removing a condition that guards another, as in
4966 (A != NULL && A->...) || A == NULL
4967 which we must not transform. If RHS_ONLY is true, only eliminate the
4968 right-most operand of the inner logical operation. */
4970 static tree
4971 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4972 bool rhs_only)
4974 tree type = TREE_TYPE (cmpop);
4975 enum tree_code code = TREE_CODE (cmpop);
4976 enum tree_code truthop_code = TREE_CODE (op);
4977 tree lhs = TREE_OPERAND (op, 0);
4978 tree rhs = TREE_OPERAND (op, 1);
4979 tree orig_lhs = lhs, orig_rhs = rhs;
4980 enum tree_code rhs_code = TREE_CODE (rhs);
4981 enum tree_code lhs_code = TREE_CODE (lhs);
4982 enum tree_code inv_code;
4984 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4985 return NULL_TREE;
4987 if (TREE_CODE_CLASS (code) != tcc_comparison)
4988 return NULL_TREE;
4990 if (rhs_code == truthop_code)
4992 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4993 if (newrhs != NULL_TREE)
4995 rhs = newrhs;
4996 rhs_code = TREE_CODE (rhs);
4999 if (lhs_code == truthop_code && !rhs_only)
5001 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5002 if (newlhs != NULL_TREE)
5004 lhs = newlhs;
5005 lhs_code = TREE_CODE (lhs);
5009 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5010 if (inv_code == rhs_code
5011 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5012 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5013 return lhs;
5014 if (!rhs_only && inv_code == lhs_code
5015 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5016 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 return rhs;
5018 if (rhs != orig_rhs || lhs != orig_lhs)
5019 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5020 lhs, rhs);
5021 return NULL_TREE;
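/* Illustrative aside (not from the original source): the
   simplification above, "(A && B) || !B ==> A || !B" and its dual,
   is a pure boolean identity when neither side has side effects,
   which an exhaustive truth-table check confirms:  */

#include <assert.h>

static void
merge_truthop_demo (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (((a && b) || !b) == (a || !b));
        assert (((a || b) && !b) == (a && !b));
      }
}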
5024 /* Find ways of folding logical expressions of LHS and RHS:
5025 Try to merge two comparisons to the same innermost item.
5026 Look for range tests like "ch >= '0' && ch <= '9'".
5027 Look for combinations of simple terms on machines with expensive branches
5028 and evaluate the RHS unconditionally.
5030 For example, if we have p->a == 2 && p->b == 4 and we can make an
5031 object large enough to span both A and B, we can do this with a comparison
5032 against the object ANDed with the a mask.
5034 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5035 operations to do this with one comparison.
5037 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5038 function and the one above.
5040 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5041 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5043 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5044 two operands.
5046 We return the simplified tree or 0 if no optimization is possible. */
5048 static tree
5049 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5050 tree lhs, tree rhs)
5052 /* If this is the "or" of two comparisons, we can do something if
5053 the comparisons are NE_EXPR. If this is the "and", we can do something
5054 if the comparisons are EQ_EXPR. I.e.,
5055 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5057 WANTED_CODE is this operation code. For single bit fields, we can
5058 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5059 comparison for one-bit fields. */
5061 enum tree_code wanted_code;
5062 enum tree_code lcode, rcode;
5063 tree ll_arg, lr_arg, rl_arg, rr_arg;
5064 tree ll_inner, lr_inner, rl_inner, rr_inner;
5065 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5066 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5067 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5068 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5069 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5070 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5071 enum machine_mode lnmode, rnmode;
5072 tree ll_mask, lr_mask, rl_mask, rr_mask;
5073 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5074 tree l_const, r_const;
5075 tree lntype, rntype, result;
5076 HOST_WIDE_INT first_bit, end_bit;
5077 int volatilep;
5079 /* Start by getting the comparison codes. Fail if anything is volatile.
5080 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5081 it were surrounded with a NE_EXPR. */
5083 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5084 return 0;
5086 lcode = TREE_CODE (lhs);
5087 rcode = TREE_CODE (rhs);
5089 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5091 lhs = build2 (NE_EXPR, truth_type, lhs,
5092 build_int_cst (TREE_TYPE (lhs), 0));
5093 lcode = NE_EXPR;
5096 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5098 rhs = build2 (NE_EXPR, truth_type, rhs,
5099 build_int_cst (TREE_TYPE (rhs), 0));
5100 rcode = NE_EXPR;
5103 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5104 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5105 return 0;
5107 ll_arg = TREE_OPERAND (lhs, 0);
5108 lr_arg = TREE_OPERAND (lhs, 1);
5109 rl_arg = TREE_OPERAND (rhs, 0);
5110 rr_arg = TREE_OPERAND (rhs, 1);
5112 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5113 if (simple_operand_p (ll_arg)
5114 && simple_operand_p (lr_arg))
5116 if (operand_equal_p (ll_arg, rl_arg, 0)
5117 && operand_equal_p (lr_arg, rr_arg, 0))
5119 result = combine_comparisons (loc, code, lcode, rcode,
5120 truth_type, ll_arg, lr_arg);
5121 if (result)
5122 return result;
5124 else if (operand_equal_p (ll_arg, rr_arg, 0)
5125 && operand_equal_p (lr_arg, rl_arg, 0))
5127 result = combine_comparisons (loc, code, lcode,
5128 swap_tree_comparison (rcode),
5129 truth_type, ll_arg, lr_arg);
5130 if (result)
5131 return result;
5135 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5136 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5138 /* If the RHS can be evaluated unconditionally and its operands are
5139 simple, it wins to evaluate the RHS unconditionally on machines
5140 with expensive branches. In this case, this isn't a comparison
5141 that can be merged. */
5143 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5144 false) >= 2
5145 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5146 && simple_operand_p (rl_arg)
5147 && simple_operand_p (rr_arg))
5149 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5150 if (code == TRUTH_OR_EXPR
5151 && lcode == NE_EXPR && integer_zerop (lr_arg)
5152 && rcode == NE_EXPR && integer_zerop (rr_arg)
5153 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5154 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5155 return build2_loc (loc, NE_EXPR, truth_type,
5156 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5157 ll_arg, rl_arg),
5158 build_int_cst (TREE_TYPE (ll_arg), 0));
5160 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5161 if (code == TRUTH_AND_EXPR
5162 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5163 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5166 return build2_loc (loc, EQ_EXPR, truth_type,
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
5172 /* See if the comparisons can be merged. Then get all the parameters for
5173 each side. */
5175 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5176 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5177 return 0;
5179 volatilep = 0;
5180 ll_inner = decode_field_reference (loc, ll_arg,
5181 &ll_bitsize, &ll_bitpos, &ll_mode,
5182 &ll_unsignedp, &volatilep, &ll_mask,
5183 &ll_and_mask);
5184 lr_inner = decode_field_reference (loc, lr_arg,
5185 &lr_bitsize, &lr_bitpos, &lr_mode,
5186 &lr_unsignedp, &volatilep, &lr_mask,
5187 &lr_and_mask);
5188 rl_inner = decode_field_reference (loc, rl_arg,
5189 &rl_bitsize, &rl_bitpos, &rl_mode,
5190 &rl_unsignedp, &volatilep, &rl_mask,
5191 &rl_and_mask);
5192 rr_inner = decode_field_reference (loc, rr_arg,
5193 &rr_bitsize, &rr_bitpos, &rr_mode,
5194 &rr_unsignedp, &volatilep, &rr_mask,
5195 &rr_and_mask);
5197 /* The inner operation on the lhs of each comparison
5198 must be the same if we are to be able to do anything.
5199 Then see if we have constants. If not, the same must be true for
5200 the rhs's. */
5201 if (volatilep || ll_inner == 0 || rl_inner == 0
5202 || ! operand_equal_p (ll_inner, rl_inner, 0))
5203 return 0;
5205 if (TREE_CODE (lr_arg) == INTEGER_CST
5206 && TREE_CODE (rr_arg) == INTEGER_CST)
5207 l_const = lr_arg, r_const = rr_arg;
5208 else if (lr_inner == 0 || rr_inner == 0
5209 || ! operand_equal_p (lr_inner, rr_inner, 0))
5210 return 0;
5211 else
5212 l_const = r_const = 0;
5214 /* If either comparison code is not correct for our logical operation,
5215 fail. However, we can convert a one-bit comparison against zero into
5216 the opposite comparison against that bit being set in the field. */
5218 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5219 if (lcode != wanted_code)
5221 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5223 /* Make the left operand unsigned, since we are only interested
5224 in the value of one bit. Otherwise we are doing the wrong
5225 thing below. */
5226 ll_unsignedp = 1;
5227 l_const = ll_mask;
5229 else
5230 return 0;
5233 /* This is analogous to the code for l_const above. */
5234 if (rcode != wanted_code)
5236 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5238 rl_unsignedp = 1;
5239 r_const = rl_mask;
5241 else
5242 return 0;
5245 /* See if we can find a mode that contains both fields being compared on
5246 the left. If we can't, fail. Otherwise, update all constants and masks
5247 to be relative to a field of that size. */
5248 first_bit = MIN (ll_bitpos, rl_bitpos);
5249 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5250 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5251 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5252 volatilep);
5253 if (lnmode == VOIDmode)
5254 return 0;
5256 lnbitsize = GET_MODE_BITSIZE (lnmode);
5257 lnbitpos = first_bit & ~ (lnbitsize - 1);
5258 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5259 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5261 if (BYTES_BIG_ENDIAN)
5263 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5264 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5267 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5268 size_int (xll_bitpos));
5269 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5270 size_int (xrl_bitpos));
5272 if (l_const)
5274 l_const = fold_convert_loc (loc, lntype, l_const);
5275 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5276 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5277 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5278 fold_build1_loc (loc, BIT_NOT_EXPR,
5279 lntype, ll_mask))))
5281 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5283 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5286 if (r_const)
5288 r_const = fold_convert_loc (loc, lntype, r_const);
5289 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5290 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5291 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5292 fold_build1_loc (loc, BIT_NOT_EXPR,
5293 lntype, rl_mask))))
5295 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5297 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5301 /* If the right sides are not constant, do the same for them. Also,
5302 disallow this optimization if a size or signedness mismatch occurs
5303 between the left and right sides. */
5304 if (l_const == 0)
5306 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5307 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5308 /* Make sure the two fields on the right
5309 correspond to the left without being swapped. */
5310 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5311 return 0;
5313 first_bit = MIN (lr_bitpos, rr_bitpos);
5314 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5315 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5316 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5317 volatilep);
5318 if (rnmode == VOIDmode)
5319 return 0;
5321 rnbitsize = GET_MODE_BITSIZE (rnmode);
5322 rnbitpos = first_bit & ~ (rnbitsize - 1);
5323 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5324 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5326 if (BYTES_BIG_ENDIAN)
5328 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5329 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5332 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5333 rntype, lr_mask),
5334 size_int (xlr_bitpos));
5335 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5336 rntype, rr_mask),
5337 size_int (xrr_bitpos));
5339 /* Make a mask that corresponds to both fields being compared.
5340 Do this for both items being compared. If the operands are the
5341 same size and the bits being compared are in the same position
5342 then we can do this by masking both and comparing the masked
5343 results. */
5344 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5345 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5346 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5348 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5349 ll_unsignedp || rl_unsignedp);
5350 if (! all_ones_mask_p (ll_mask, lnbitsize))
5351 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5353 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5354 lr_unsignedp || rr_unsignedp);
5355 if (! all_ones_mask_p (lr_mask, rnbitsize))
5356 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5358 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5361 /* There is still another way we can do something: If both pairs of
5362 fields being compared are adjacent, we may be able to make a wider
5363 field containing them both.
5365 Note that we still must mask the lhs/rhs expressions. Furthermore,
5366 the mask must be shifted to account for the shift done by
5367 make_bit_field_ref. */
5368 if ((ll_bitsize + ll_bitpos == rl_bitpos
5369 && lr_bitsize + lr_bitpos == rr_bitpos)
5370 || (ll_bitpos == rl_bitpos + rl_bitsize
5371 && lr_bitpos == rr_bitpos + rr_bitsize))
5373 tree type;
5375 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5376 ll_bitsize + rl_bitsize,
5377 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5378 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5379 lr_bitsize + rr_bitsize,
5380 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5382 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5383 size_int (MIN (xll_bitpos, xrl_bitpos)));
5384 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5385 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5387 /* Convert to the smaller type before masking out unwanted bits. */
5388 type = lntype;
5389 if (lntype != rntype)
5391 if (lnbitsize > rnbitsize)
5393 lhs = fold_convert_loc (loc, rntype, lhs);
5394 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5395 type = rntype;
5397 else if (lnbitsize < rnbitsize)
5399 rhs = fold_convert_loc (loc, lntype, rhs);
5400 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5401 type = lntype;
5405 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5406 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5408 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5409 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5411 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5414 return 0;
5417 /* Handle the case of comparisons with constants. If there is something in
5418 common between the masks, those bits of the constants must be the same.
5419 If not, the condition is always false. Test for this to avoid generating
5420 incorrect code below. */
5421 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5422 if (! integer_zerop (result)
5423 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5424 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5426 if (wanted_code == NE_EXPR)
5428 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5429 return constant_boolean_node (true, truth_type);
5431 else
5433 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5434 return constant_boolean_node (false, truth_type);
5438 /* Construct the expression we will return. First get the component
5439 reference we will make. Unless the mask is all ones for the width of
5440 that field, perform the mask operation. Then compare with the
5441 merged constant. */
5442 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5443 ll_unsignedp || rl_unsignedp);
5445 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5446 if (! all_ones_mask_p (ll_mask, lnbitsize))
5447 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5449 return build2_loc (loc, wanted_code, truth_type, result,
5450 const_binop (BIT_IOR_EXPR, l_const, r_const));
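/* Illustrative aside (not from the original source): the payoff of the
   function above is that two byte compares against constants in the
   same word can become one masked word compare.  A hand-written model
   of the folded form; it assumes the four uint8_t fields pack into one
   32-bit word with no padding, which holds on common ABIs, and it is
   endian-safe because mask and constant are built through the same
   struct layout:  */

#include <stdint.h>
#include <string.h>
#include <assert.h>

struct fm_s { uint8_t a; uint8_t b; uint8_t c; uint8_t d; };

static int
two_compares (const struct fm_s *p)    /* like a->b == 2 && a->c == 4 */
{
  return p->a == 2 && p->c == 4;
}

static int
one_masked_compare (const struct fm_s *p)
{
  struct fm_s m = { 0xff, 0, 0xff, 0 };  /* mask covering fields A, C */
  struct fm_s w = { 2, 0, 4, 0 };        /* merged constant */
  uint32_t word, mask, want;
  memcpy (&word, p, sizeof word);
  memcpy (&mask, &m, sizeof mask);
  memcpy (&want, &w, sizeof want);
  return (word & mask) == want;
}

static void
field_merge_demo (void)
{
  struct fm_s ok = { 2, 9, 4, 9 }, bad = { 2, 9, 5, 9 };
  assert (two_compares (&ok) && one_masked_compare (&ok));
  assert (!two_compares (&bad) && !one_masked_compare (&bad));
}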
5453 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5454 constant. */
5456 static tree
5457 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5458 tree op0, tree op1)
5460 tree arg0 = op0;
5461 enum tree_code op_code;
5462 tree comp_const;
5463 tree minmax_const;
5464 int consts_equal, consts_lt;
5465 tree inner;
5467 STRIP_SIGN_NOPS (arg0);
5469 op_code = TREE_CODE (arg0);
5470 minmax_const = TREE_OPERAND (arg0, 1);
5471 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5472 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5473 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5474 inner = TREE_OPERAND (arg0, 0);
5476 /* If something does not permit us to optimize, return NULL_TREE. */
5477 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5478 || TREE_CODE (comp_const) != INTEGER_CST
5479 || TREE_OVERFLOW (comp_const)
5480 || TREE_CODE (minmax_const) != INTEGER_CST
5481 || TREE_OVERFLOW (minmax_const))
5482 return NULL_TREE;
5484 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5485 and GT_EXPR, doing the rest with recursive calls using logical
5486 simplifications. */
5487 switch (code)
5489 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5491 tree tem
5492 = optimize_minmax_comparison (loc,
5493 invert_tree_comparison (code, false),
5494 type, op0, op1);
5495 if (tem)
5496 return invert_truthvalue_loc (loc, tem);
5497 return NULL_TREE;
5500 case GE_EXPR:
5501 return
5502 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5503 optimize_minmax_comparison
5504 (loc, EQ_EXPR, type, arg0, comp_const),
5505 optimize_minmax_comparison
5506 (loc, GT_EXPR, type, arg0, comp_const));
5508 case EQ_EXPR:
5509 if (op_code == MAX_EXPR && consts_equal)
5510 /* MAX (X, 0) == 0 -> X <= 0 */
5511 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5513 else if (op_code == MAX_EXPR && consts_lt)
5514 /* MAX (X, 0) == 5 -> X == 5 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 else if (op_code == MAX_EXPR)
5518 /* MAX (X, 0) == -1 -> false */
5519 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5521 else if (consts_equal)
5522 /* MIN (X, 0) == 0 -> X >= 0 */
5523 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5525 else if (consts_lt)
5526 /* MIN (X, 0) == 5 -> false */
5527 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5529 else
5530 /* MIN (X, 0) == -1 -> X == -1 */
5531 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5533 case GT_EXPR:
5534 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5535 /* MAX (X, 0) > 0 -> X > 0
5536 MAX (X, 0) > 5 -> X > 5 */
5537 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5539 else if (op_code == MAX_EXPR)
5540 /* MAX (X, 0) > -1 -> true */
5541 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5543 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5544 /* MIN (X, 0) > 0 -> false
5545 MIN (X, 0) > 5 -> false */
5546 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5548 else
5549 /* MIN (X, 0) > -1 -> X > -1 */
5550 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5552 default:
5553 return NULL_TREE;
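/* Worked instance (illustrative): MAX (X, 10) >= 12 goes through the
   GE_EXPR case as MAX (X, 10) == 12 || MAX (X, 10) > 12; since
   consts_lt holds (10 < 12), the EQ_EXPR and GT_EXPR cases reduce the
   pair to X == 12 || X > 12, i.e. X >= 12.  */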
5557 /* T is an integer expression that is being multiplied or divided by, or
5558 taken modulo, a constant C (CODE says which operation and what kind of
5559 divide or modulus). See if we can eliminate that operation by folding it with
5560 other operations already in T. WIDE_TYPE, if non-null, is a type that
5561 should be used for the computation if wider than our type.
5563 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5564 (X * 2) + (Y * 4). We must, however, be assured that either the original
5565 expression would not overflow or that overflow is undefined for the type
5566 in the language in question.
5568 If we return a non-null expression, it is an equivalent form of the
5569 original computation, but need not be in the original type.
5571 We set *STRICT_OVERFLOW_P to true if the return value depends on
5572 signed overflow being undefined. Otherwise we do not change
5573 *STRICT_OVERFLOW_P. */
5575 static tree
5576 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5577 bool *strict_overflow_p)
5579 /* To avoid exponential search depth, refuse to allow recursion past
5580 three levels. Beyond that (1) it's highly unlikely that we'll find
5581 something interesting and (2) we've probably processed it before
5582 when we built the inner expression. */
5584 static int depth;
5585 tree ret;
5587 if (depth > 3)
5588 return NULL;
5590 depth++;
5591 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5592 depth--;
5594 return ret;
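/* Worked instance (illustrative) for extract_muldiv: with
   T == X * 6 + 3, C == 3 and CODE == TRUNC_DIV_EXPR, the PLUS_EXPR
   case divides both addends and returns X * 2 + 1.  Cancelling the
   inner multiplication against the division is only valid when signed
   overflow is undefined, which is why *STRICT_OVERFLOW_P gets set.  */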
5597 static tree
5598 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5599 bool *strict_overflow_p)
5601 tree type = TREE_TYPE (t);
5602 enum tree_code tcode = TREE_CODE (t);
5603 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5604 > GET_MODE_SIZE (TYPE_MODE (type)))
5605 ? wide_type : type);
5606 tree t1, t2;
5607 int same_p = tcode == code;
5608 tree op0 = NULL_TREE, op1 = NULL_TREE;
5609 bool sub_strict_overflow_p;
5611 /* Don't deal with constants of zero here; they confuse the code below. */
5612 if (integer_zerop (c))
5613 return NULL_TREE;
5615 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5616 op0 = TREE_OPERAND (t, 0);
5618 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5619 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5621 /* Note that we need not handle conditional operations here since fold
5622 already handles those cases. So just do arithmetic here. */
5623 switch (tcode)
5625 case INTEGER_CST:
5626 /* For a constant, we can always simplify if we are a multiply
5627 or (for divide and modulus) if it is a multiple of our constant. */
5628 if (code == MULT_EXPR
5629 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5630 return const_binop (code, fold_convert (ctype, t),
5631 fold_convert (ctype, c));
5632 break;
5634 CASE_CONVERT: case NON_LVALUE_EXPR:
5635 /* If op0 is an expression ... */
5636 if ((COMPARISON_CLASS_P (op0)
5637 || UNARY_CLASS_P (op0)
5638 || BINARY_CLASS_P (op0)
5639 || VL_EXP_CLASS_P (op0)
5640 || EXPRESSION_CLASS_P (op0))
5641 /* ... and has wrapping overflow, and its type is smaller
5642 than ctype, then we cannot pass through as widening. */
5643 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5644 && (TYPE_PRECISION (ctype)
5645 > TYPE_PRECISION (TREE_TYPE (op0))))
5646 /* ... or this is a truncation (t is narrower than op0),
5647 then we cannot pass through this narrowing. */
5648 || (TYPE_PRECISION (type)
5649 < TYPE_PRECISION (TREE_TYPE (op0)))
5650 /* ... or signedness changes for division or modulus,
5651 then we cannot pass through this conversion. */
5652 || (code != MULT_EXPR
5653 && (TYPE_UNSIGNED (ctype)
5654 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5655 /* ... or has undefined overflow while the converted to
5656 type has not, we cannot do the operation in the inner type
5657 as that would introduce undefined overflow. */
5658 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5659 && !TYPE_OVERFLOW_UNDEFINED (type))))
5660 break;
5662 /* Pass the constant down and see if we can make a simplification. If
5663 we can, replace this expression with the inner simplification for
5664 possible later conversion to our or some other type. */
5665 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5666 && TREE_CODE (t2) == INTEGER_CST
5667 && !TREE_OVERFLOW (t2)
5668 && (0 != (t1 = extract_muldiv (op0, t2, code,
5669 code == MULT_EXPR
5670 ? ctype : NULL_TREE,
5671 strict_overflow_p))))
5672 return t1;
5673 break;
5675 case ABS_EXPR:
5676 /* If widening the type changes it from signed to unsigned, then we
5677 must avoid building ABS_EXPR itself as unsigned. */
5678 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5680 tree cstype = (*signed_type_for) (ctype);
5681 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5682 != 0)
5684 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5685 return fold_convert (ctype, t1);
5687 break;
5689 /* If the constant is negative, we cannot simplify this. */
5690 if (tree_int_cst_sgn (c) == -1)
5691 break;
5692 /* FALLTHROUGH */
5693 case NEGATE_EXPR:
5694 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5695 != 0)
5696 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5697 break;
5699 case MIN_EXPR: case MAX_EXPR:
5700 /* If widening the type changes the signedness, then we can't perform
5701 this optimization as that changes the result. */
5702 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5703 break;
5705 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5706 sub_strict_overflow_p = false;
5707 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5708 &sub_strict_overflow_p)) != 0
5709 && (t2 = extract_muldiv (op1, c, code, wide_type,
5710 &sub_strict_overflow_p)) != 0)
5712 if (tree_int_cst_sgn (c) < 0)
5713 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5714 if (sub_strict_overflow_p)
5715 *strict_overflow_p = true;
5716 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5717 fold_convert (ctype, t2));
5719 break;
5721 case LSHIFT_EXPR: case RSHIFT_EXPR:
5722 /* If the second operand is constant, this is a multiplication
5723 or floor division, by a power of two, so we can treat it that
5724 way unless the multiplier or divisor overflows. Signed
5725 left-shift overflow is implementation-defined rather than
5726 undefined in C90, so do not convert signed left shift into
5727 multiplication. */
5728 if (TREE_CODE (op1) == INTEGER_CST
5729 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5730 /* const_binop may not detect overflow correctly,
5731 so check for it explicitly here. */
5732 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5733 && TREE_INT_CST_HIGH (op1) == 0
5734 && 0 != (t1 = fold_convert (ctype,
5735 const_binop (LSHIFT_EXPR,
5736 size_one_node,
5737 op1)))
5738 && !TREE_OVERFLOW (t1))
5739 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5740 ? MULT_EXPR : FLOOR_DIV_EXPR,
5741 ctype,
5742 fold_convert (ctype, op0),
5743 t1),
5744 c, code, wide_type, strict_overflow_p);
5745 break;
5747 case PLUS_EXPR: case MINUS_EXPR:
5748 /* See if we can eliminate the operation on both sides. If we can, we
5749 can return a new PLUS or MINUS. If we can't, the only remaining
5750 cases where we can do anything are if the second operand is a
5751 constant. */
5752 sub_strict_overflow_p = false;
5753 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5754 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5755 if (t1 != 0 && t2 != 0
5756 && (code == MULT_EXPR
5757 /* If not multiplication, we can only do this if both operands
5758 are divisible by c. */
5759 || (multiple_of_p (ctype, op0, c)
5760 && multiple_of_p (ctype, op1, c))))
5762 if (sub_strict_overflow_p)
5763 *strict_overflow_p = true;
5764 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5765 fold_convert (ctype, t2));
5768 /* If this was a subtraction, negate OP1 and set it to be an addition.
5769 This simplifies the logic below. */
5770 if (tcode == MINUS_EXPR)
5772 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5773 /* If OP1 was not easily negatable, the constant may be OP0. */
5774 if (TREE_CODE (op0) == INTEGER_CST)
5776 tree tem = op0;
5777 op0 = op1;
5778 op1 = tem;
5779 tem = t1;
5780 t1 = t2;
5781 t2 = tem;
5785 if (TREE_CODE (op1) != INTEGER_CST)
5786 break;
5788 /* If either OP1 or C are negative, this optimization is not safe for
5789 some of the division and remainder types while for others we need
5790 to change the code. */
5791 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5793 if (code == CEIL_DIV_EXPR)
5794 code = FLOOR_DIV_EXPR;
5795 else if (code == FLOOR_DIV_EXPR)
5796 code = CEIL_DIV_EXPR;
5797 else if (code != MULT_EXPR
5798 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5799 break;
5802 /* If it's a multiply or a division/modulus operation of a multiple
5803 of our constant, do the operation and verify it doesn't overflow. */
5804 if (code == MULT_EXPR
5805 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5807 op1 = const_binop (code, fold_convert (ctype, op1),
5808 fold_convert (ctype, c));
5809 /* We allow the constant to overflow with wrapping semantics. */
5810 if (op1 == 0
5811 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5812 break;
5814 else
5815 break;
5817 /* If we have an unsigned type, we cannot widen the operation since it
5818 will change the result if the original computation overflowed. */
5819 if (TYPE_UNSIGNED (ctype) && ctype != type)
5820 break;
5822 /* If we were able to eliminate our operation from the first side,
5823 apply our operation to the second side and reform the PLUS. */
5824 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5825 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5827 /* The last case is if we are a multiply. In that case, we can
5828 apply the distributive law to commute the multiply and addition
5829 if the multiplication of the constants doesn't overflow. */
5830 if (code == MULT_EXPR)
5831 return fold_build2 (tcode, ctype,
5832 fold_build2 (code, ctype,
5833 fold_convert (ctype, op0),
5834 fold_convert (ctype, c)),
5835 op1);
5837 break;
5839 case MULT_EXPR:
5840 /* We have a special case here if we are doing something like
5841 (C * 8) % 4 since we know that's zero. */
5842 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5843 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5844 /* If the multiplication can overflow we cannot optimize this. */
5845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5846 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5849 *strict_overflow_p = true;
5850 return omit_one_operand (type, integer_zero_node, op0);
5853 /* ... fall through ... */
5855 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5856 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5860 if (same_p
5861 && (t1 = extract_muldiv (op0, c, code, wide_type,
5862 strict_overflow_p)) != 0)
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, op1));
5865 else if (tcode == MULT_EXPR && code == MULT_EXPR
5866 && (t1 = extract_muldiv (op1, c, code, wide_type,
5867 strict_overflow_p)) != 0)
5868 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype, t1));
5870 else if (TREE_CODE (op1) != INTEGER_CST)
5871 return 0;
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5875 if (tcode == code)
5877 double_int mul;
5878 bool overflow_p;
5879 unsigned prec = TYPE_PRECISION (ctype);
5880 bool uns = TYPE_UNSIGNED (ctype);
5881 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5882 double_int dic = tree_to_double_int (c).ext (prec, uns);
5883 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5884 overflow_p = ((!uns && overflow_p)
5885 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5886 if (!double_int_fits_to_tree_p (ctype, mul)
5887 && ((uns && tcode != MULT_EXPR) || !uns))
5888 overflow_p = 1;
5889 if (!overflow_p)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 double_int_to_tree (ctype, mul));
5894 /* If these operations "cancel" each other, we have the main
5895 optimizations of this pass, which occur when either constant is a
5896 multiple of the other, in which case we replace this with an
5897 operation of either CODE or TCODE.
5899 If we have an unsigned type, we cannot do this since it will change
5900 the result if the original computation overflowed. */
5901 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5902 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5903 || (tcode == MULT_EXPR
5904 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5905 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5906 && code != MULT_EXPR)))
5908 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5910 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5911 *strict_overflow_p = true;
5912 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5913 fold_convert (ctype,
5914 const_binop (TRUNC_DIV_EXPR,
5915 op1, c)));
5917 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5919 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5920 *strict_overflow_p = true;
5921 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5922 fold_convert (ctype,
5923 const_binop (TRUNC_DIV_EXPR,
5924 c, op1)));
5927 break;
5929 default:
5930 break;
5933 return 0;
5936 /* Return a node which has the indicated constant VALUE (either 0 or
5937 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5938 and is of the indicated TYPE. */
5940 tree
5941 constant_boolean_node (bool value, tree type)
5943 if (type == integer_type_node)
5944 return value ? integer_one_node : integer_zero_node;
5945 else if (type == boolean_type_node)
5946 return value ? boolean_true_node : boolean_false_node;
5947 else if (TREE_CODE (type) == VECTOR_TYPE)
5948 return build_vector_from_val (type,
5949 build_int_cst (TREE_TYPE (type),
5950 value ? -1 : 0));
5951 else
5952 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5956 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5957 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5958 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5959 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5960 COND is the first argument to CODE; otherwise (as in the example
5961 given here), it is the second argument. TYPE is the type of the
5962 original expression. Return NULL_TREE if no simplification is
5963 possible. */
5965 static tree
5966 fold_binary_op_with_conditional_arg (location_t loc,
5967 enum tree_code code,
5968 tree type, tree op0, tree op1,
5969 tree cond, tree arg, int cond_first_p)
5971 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5972 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5973 tree test, true_value, false_value;
5974 tree lhs = NULL_TREE;
5975 tree rhs = NULL_TREE;
5976 enum tree_code cond_code = COND_EXPR;
5978 if (TREE_CODE (cond) == COND_EXPR
5979 || TREE_CODE (cond) == VEC_COND_EXPR)
5981 test = TREE_OPERAND (cond, 0);
5982 true_value = TREE_OPERAND (cond, 1);
5983 false_value = TREE_OPERAND (cond, 2);
5984 /* If this arm is a throw expression (and hence has void type), it
5985 does not make sense to try to perform a logical or arithmetic
5986 operation involving it. */
5987 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5988 lhs = true_value;
5989 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5990 rhs = false_value;
5992 else
5994 tree testtype = TREE_TYPE (cond);
5995 test = cond;
5996 true_value = constant_boolean_node (true, testtype);
5997 false_value = constant_boolean_node (false, testtype);
6000 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6001 cond_code = VEC_COND_EXPR;
6003 /* This transformation is only worthwhile if we don't have to wrap ARG
6004 in a SAVE_EXPR and the operation can be simplified without recursing
6005 on at least one of the branches once it's pushed inside the COND_EXPR. */
6006 if (!TREE_CONSTANT (arg)
6007 && (TREE_SIDE_EFFECTS (arg)
6008 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6009 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6010 return NULL_TREE;
6012 arg = fold_convert_loc (loc, arg_type, arg);
6013 if (lhs == 0)
6015 true_value = fold_convert_loc (loc, cond_type, true_value);
6016 if (cond_first_p)
6017 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6018 else
6019 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6021 if (rhs == 0)
6023 false_value = fold_convert_loc (loc, cond_type, false_value);
6024 if (cond_first_p)
6025 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6026 else
6027 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6030 /* Check that we have simplified at least one of the branches. */
6031 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6032 return NULL_TREE;
6034 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
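/* For example (illustrative): with CODE == PLUS_EXPR, ARG == 2 and
   COND == (b ? x : 1), the result is b ? 2 + x : 3.  The guards above
   ensure that ARG need not be wrapped in a SAVE_EXPR and that at least
   one branch really simplified.  */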
6038 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6040 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6041 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6042 ADDEND is the same as X.
6044 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6045 and finite. The problematic cases are when X is zero, and its mode
6046 has signed zeros. In the case of rounding towards -infinity,
6047 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6048 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6050 bool
6051 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6053 if (!real_zerop (addend))
6054 return false;
6056 /* Don't allow the fold with -fsignaling-nans. */
6057 if (HONOR_SNANS (TYPE_MODE (type)))
6058 return false;
6060 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6061 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6062 return true;
6064 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6065 if (TREE_CODE (addend) == REAL_CST
6066 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6067 negate = !negate;
6069 /* The mode has signed zeros, and we have to honor their sign.
6070 In this situation, there is only one case we can return true for.
6071 X - 0 is the same as X unless rounding towards -infinity is
6072 supported. */
6073 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
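/* A standalone sketch (illustrative; not used by the folder) of why
   only the subtractive form survives once signed zeros are honored:
   under the default rounding mode the sign of a zero X is lost by
   X + 0.0 but kept by X - 0.0.  */
static int
signed_zero_example (void)
{
  double x = -0.0;
  double add = x + 0.0;		/* +0.0: the sign of x is lost.  */
  double sub = x - 0.0;		/* -0.0: the sign of x is kept.  */
  return __builtin_signbit (add) != __builtin_signbit (sub);	/* 1 */
}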
6076 /* Subroutine of fold() that checks comparisons of built-in math
6077 functions against real constants.
6079 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6080 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6081 is the type of the result and ARG0 and ARG1 are the operands of the
6082 comparison. ARG1 must be a TREE_REAL_CST.
6084 The function returns the constant folded tree if a simplification
6085 can be made, and NULL_TREE otherwise. */
6087 static tree
6088 fold_mathfn_compare (location_t loc,
6089 enum built_in_function fcode, enum tree_code code,
6090 tree type, tree arg0, tree arg1)
6092 REAL_VALUE_TYPE c;
6094 if (BUILTIN_SQRT_P (fcode))
6096 tree arg = CALL_EXPR_ARG (arg0, 0);
6097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6099 c = TREE_REAL_CST (arg1);
6100 if (REAL_VALUE_NEGATIVE (c))
6102 /* sqrt(x) < y (likewise <= and ==) is always false, if y is negative. */
6103 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6104 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6106 /* sqrt(x) > y is always true, if y is negative and we
6107 don't care about NaNs, i.e. negative values of x. */
6108 if (code == NE_EXPR || !HONOR_NANS (mode))
6109 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6111 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6112 return fold_build2_loc (loc, GE_EXPR, type, arg,
6113 build_real (TREE_TYPE (arg), dconst0));
6115 else if (code == GT_EXPR || code == GE_EXPR)
6117 REAL_VALUE_TYPE c2;
6119 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6120 real_convert (&c2, mode, &c2);
6122 if (REAL_VALUE_ISINF (c2))
6124 /* sqrt(x) > y is x == +Inf, when y is very large. */
6125 if (HONOR_INFINITIES (mode))
6126 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6127 build_real (TREE_TYPE (arg), c2));
6129 /* sqrt(x) > y is always false, when y is very large
6130 and we don't care about infinities. */
6131 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6134 /* sqrt(x) > c is the same as x > c*c. */
6135 return fold_build2_loc (loc, code, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 else if (code == LT_EXPR || code == LE_EXPR)
6140 REAL_VALUE_TYPE c2;
6142 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6143 real_convert (&c2, mode, &c2);
6145 if (REAL_VALUE_ISINF (c2))
6147 /* sqrt(x) < y is always true, when y is a very large
6148 value and we don't care about NaNs or Infinities. */
6149 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6150 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6152 /* sqrt(x) < y is x != +Inf when y is very large and we
6153 don't care about NaNs. */
6154 if (! HONOR_NANS (mode))
6155 return fold_build2_loc (loc, NE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg), c2));
6158 /* sqrt(x) < y is x >= 0 when y is very large and we
6159 don't care about Infinities. */
6160 if (! HONOR_INFINITIES (mode))
6161 return fold_build2_loc (loc, GE_EXPR, type, arg,
6162 build_real (TREE_TYPE (arg), dconst0));
6164 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6165 arg = save_expr (arg);
6166 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6167 fold_build2_loc (loc, GE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg),
6169 dconst0)),
6170 fold_build2_loc (loc, NE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 c2)));
6175 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6176 if (! HONOR_NANS (mode))
6177 return fold_build2_loc (loc, code, type, arg,
6178 build_real (TREE_TYPE (arg), c2));
6180 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6181 arg = save_expr (arg);
6182 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6183 fold_build2_loc (loc, GE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg),
6185 dconst0)),
6186 fold_build2_loc (loc, code, type, arg,
6187 build_real (TREE_TYPE (arg),
6188 c2)));
6192 return NULL_TREE;
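/* Worked instance (illustrative): for double x, sqrt (x) > 2.0 folds
   to x > 4.0, since c2 == 4.0 is exact and finite; sqrt (x) < -1.0 is
   known false, so only x's side effects are kept.  */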
6195 /* Subroutine of fold() that optimizes comparisons against Infinities,
6196 either +Inf or -Inf.
6198 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6199 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6200 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6202 The function returns the constant folded tree if a simplification
6203 can be made, and NULL_TREE otherwise. */
6205 static tree
6206 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6207 tree arg0, tree arg1)
6209 enum machine_mode mode;
6210 REAL_VALUE_TYPE max;
6211 tree temp;
6212 bool neg;
6214 mode = TYPE_MODE (TREE_TYPE (arg0));
6216 /* For negative infinity swap the sense of the comparison. */
6217 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6218 if (neg)
6219 code = swap_tree_comparison (code);
6221 switch (code)
6223 case GT_EXPR:
6224 /* x > +Inf is always false, if we ignore sNaNs. */
6225 if (HONOR_SNANS (mode))
6226 return NULL_TREE;
6227 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6229 case LE_EXPR:
6230 /* x <= +Inf is always true, if we don't care about NaNs. */
6231 if (! HONOR_NANS (mode))
6232 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6234 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6235 arg0 = save_expr (arg0);
6236 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6238 case EQ_EXPR:
6239 case GE_EXPR:
6240 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6241 real_maxval (&max, neg, mode);
6242 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6243 arg0, build_real (TREE_TYPE (arg0), max));
6245 case LT_EXPR:
6246 /* x < +Inf is always equal to x <= DBL_MAX. */
6247 real_maxval (&max, neg, mode);
6248 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6249 arg0, build_real (TREE_TYPE (arg0), max));
6251 case NE_EXPR:
6252 /* x != +Inf is always equal to !(x > DBL_MAX). */
6253 real_maxval (&max, neg, mode);
6254 if (! HONOR_NANS (mode))
6255 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6258 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6259 arg0, build_real (TREE_TYPE (arg0), max));
6260 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6262 default:
6263 break;
6266 return NULL_TREE;
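/* Worked instance (illustrative): for double x, x < +Inf folds to
   x <= DBL_MAX, and x != +Inf folds the same way once NaNs need not
   be honored (e.g. under -ffinite-math-only).  */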
6269 /* Subroutine of fold() that optimizes comparisons of a division by
6270 a nonzero integer constant against an integer constant, i.e.
6271 X/C1 op C2.
6273 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6274 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6275 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6277 The function returns the constant folded tree if a simplification
6278 can be made, and NULL_TREE otherwise. */
6280 static tree
6281 fold_div_compare (location_t loc,
6282 enum tree_code code, tree type, tree arg0, tree arg1)
6284 tree prod, tmp, hi, lo;
6285 tree arg00 = TREE_OPERAND (arg0, 0);
6286 tree arg01 = TREE_OPERAND (arg0, 1);
6287 double_int val;
6288 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6289 bool neg_overflow;
6290 bool overflow;
6292 /* We have to do this the hard way to detect unsigned overflow.
6293 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6294 val = TREE_INT_CST (arg01)
6295 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6296 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6297 neg_overflow = false;
6299 if (unsigned_p)
6301 tmp = int_const_binop (MINUS_EXPR, arg01,
6302 build_int_cst (TREE_TYPE (arg01), 1));
6303 lo = prod;
6305 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6306 val = TREE_INT_CST (prod)
6307 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6308 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6309 -1, overflow | TREE_OVERFLOW (prod));
6311 else if (tree_int_cst_sgn (arg01) >= 0)
6313 tmp = int_const_binop (MINUS_EXPR, arg01,
6314 build_int_cst (TREE_TYPE (arg01), 1));
6315 switch (tree_int_cst_sgn (arg1))
6317 case -1:
6318 neg_overflow = true;
6319 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6320 hi = prod;
6321 break;
6323 case 0:
6324 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6325 hi = tmp;
6326 break;
6328 case 1:
6329 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6330 lo = prod;
6331 break;
6333 default:
6334 gcc_unreachable ();
6337 else
6339 /* A negative divisor reverses the relational operators. */
6340 code = swap_tree_comparison (code);
6342 tmp = int_const_binop (PLUS_EXPR, arg01,
6343 build_int_cst (TREE_TYPE (arg01), 1));
6344 switch (tree_int_cst_sgn (arg1))
6346 case -1:
6347 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6348 lo = prod;
6349 break;
6351 case 0:
6352 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6353 lo = tmp;
6354 break;
6356 case 1:
6357 neg_overflow = true;
6358 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6359 hi = prod;
6360 break;
6362 default:
6363 gcc_unreachable ();
6367 switch (code)
6369 case EQ_EXPR:
6370 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6371 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6372 if (TREE_OVERFLOW (hi))
6373 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6374 if (TREE_OVERFLOW (lo))
6375 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6376 return build_range_check (loc, type, arg00, 1, lo, hi);
6378 case NE_EXPR:
6379 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6380 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6381 if (TREE_OVERFLOW (hi))
6382 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6383 if (TREE_OVERFLOW (lo))
6384 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6385 return build_range_check (loc, type, arg00, 0, lo, hi);
6387 case LT_EXPR:
6388 if (TREE_OVERFLOW (lo))
6390 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6391 return omit_one_operand_loc (loc, type, tmp, arg00);
6393 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6395 case LE_EXPR:
6396 if (TREE_OVERFLOW (hi))
6398 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6399 return omit_one_operand_loc (loc, type, tmp, arg00);
6401 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6403 case GT_EXPR:
6404 if (TREE_OVERFLOW (hi))
6406 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6407 return omit_one_operand_loc (loc, type, tmp, arg00);
6409 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6411 case GE_EXPR:
6412 if (TREE_OVERFLOW (lo))
6414 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6415 return omit_one_operand_loc (loc, type, tmp, arg00);
6417 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6419 default:
6420 break;
6423 return NULL_TREE;
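/* A standalone sketch (illustrative; not used by the folder) of the
   range derived above for truncating signed division: with arg01 == 3
   and arg1 == 2 we get prod == 6 and tmp == 2, hence lo == 6 and
   hi == 8.  */
static int
div_compare_example (int x)
{
  /* x / 3 == 2 folds to the range check 6 <= x && x <= 8.  */
  return (x / 3 == 2) == (x >= 6 && x <= 8);	/* Always 1.  */
}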
6427 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6428 equality/inequality test, then return a simplified form of the test
6429 using a sign test. Otherwise return NULL. TYPE is the desired
6430 result type. */
6432 static tree
6433 fold_single_bit_test_into_sign_test (location_t loc,
6434 enum tree_code code, tree arg0, tree arg1,
6435 tree result_type)
6437 /* If this is testing a single bit, we can optimize the test. */
6438 if ((code == NE_EXPR || code == EQ_EXPR)
6439 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6440 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6442 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6443 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6444 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6446 if (arg00 != NULL_TREE
6447 /* This is only a win if casting to a signed type is cheap,
6448 i.e. when arg00's type is not a partial mode. */
6449 && TYPE_PRECISION (TREE_TYPE (arg00))
6450 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6452 tree stype = signed_type_for (TREE_TYPE (arg00));
6453 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6454 result_type,
6455 fold_convert_loc (loc, stype, arg00),
6456 build_int_cst (stype, 0));
6460 return NULL_TREE;
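/* For example (illustrative): for a 32-bit unsigned x, the test
   (x & 0x80000000) != 0 isolates the sign bit and folds to
   (int) x < 0, and (x & 0x80000000) == 0 folds to (int) x >= 0.  */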
6463 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6464 equality/inequality test, then return a simplified form of
6465 the test using shifts and logical operations. Otherwise return
6466 NULL. TYPE is the desired result type. */
6468 tree
6469 fold_single_bit_test (location_t loc, enum tree_code code,
6470 tree arg0, tree arg1, tree result_type)
6472 /* If this is testing a single bit, we can optimize the test. */
6473 if ((code == NE_EXPR || code == EQ_EXPR)
6474 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6475 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6477 tree inner = TREE_OPERAND (arg0, 0);
6478 tree type = TREE_TYPE (arg0);
6479 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6480 enum machine_mode operand_mode = TYPE_MODE (type);
6481 int ops_unsigned;
6482 tree signed_type, unsigned_type, intermediate_type;
6483 tree tem, one;
6485 /* First, see if we can fold the single bit test into a sign-bit
6486 test. */
6487 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6488 result_type);
6489 if (tem)
6490 return tem;
6492 /* Otherwise we have (A & C) != 0 where C is a single bit,
6493 convert that into ((A >> C2) & 1), where C2 = log2(C).
6494 Similarly for (A & C) == 0. */
6496 /* If INNER is a right shift of a constant and it plus BITNUM does
6497 not overflow, adjust BITNUM and INNER. */
6498 if (TREE_CODE (inner) == RSHIFT_EXPR
6499 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6500 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6501 && bitnum < TYPE_PRECISION (type)
6502 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6503 bitnum - TYPE_PRECISION (type)))
6505 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6506 inner = TREE_OPERAND (inner, 0);
6509 /* If we are going to be able to omit the AND below, we must do our
6510 operations as unsigned. If we must use the AND, we have a choice.
6511 Normally unsigned is faster, but for some machines signed is. */
6512 #ifdef LOAD_EXTEND_OP
6513 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6514 && !flag_syntax_only) ? 0 : 1;
6515 #else
6516 ops_unsigned = 1;
6517 #endif
6519 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6520 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6521 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6522 inner = fold_convert_loc (loc, intermediate_type, inner);
6524 if (bitnum != 0)
6525 inner = build2 (RSHIFT_EXPR, intermediate_type,
6526 inner, size_int (bitnum));
6528 one = build_int_cst (intermediate_type, 1);
6530 if (code == EQ_EXPR)
6531 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6533 /* Put the AND last so it can combine with more things. */
6534 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6536 /* Make sure to return the proper type. */
6537 inner = fold_convert_loc (loc, result_type, inner);
6539 return inner;
6541 return NULL_TREE;
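/* A standalone sketch (illustrative) of the shift form built above
   when the sign-bit fold does not apply.  */
static int
single_bit_test_example (unsigned a)
{
  /* (a & 8) != 0 becomes (a >> 3) & 1; the EQ_EXPR variant XORs in a
     one first, so (a & 8) == 0 becomes ((a >> 3) ^ 1) & 1.  */
  return ((a & 8) != 0) == ((a >> 3) & 1);	/* Always 1.  */
}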
6544 /* Check whether we are allowed to reorder operands arg0 and arg1,
6545 such that the evaluation of arg1 occurs before arg0. */
6547 static bool
6548 reorder_operands_p (const_tree arg0, const_tree arg1)
6550 if (! flag_evaluation_order)
6551 return true;
6552 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6553 return true;
6554 return ! TREE_SIDE_EFFECTS (arg0)
6555 && ! TREE_SIDE_EFFECTS (arg1);
6558 /* Test whether it is preferable to swap two operands, ARG0 and
6559 ARG1, for example because ARG0 is an integer constant and ARG1
6560 isn't. If REORDER is true, only recommend swapping if we can
6561 evaluate the operands in reverse order. */
6563 bool
6564 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6566 STRIP_SIGN_NOPS (arg0);
6567 STRIP_SIGN_NOPS (arg1);
6569 if (TREE_CODE (arg1) == INTEGER_CST)
6570 return 0;
6571 if (TREE_CODE (arg0) == INTEGER_CST)
6572 return 1;
6574 if (TREE_CODE (arg1) == REAL_CST)
6575 return 0;
6576 if (TREE_CODE (arg0) == REAL_CST)
6577 return 1;
6579 if (TREE_CODE (arg1) == FIXED_CST)
6580 return 0;
6581 if (TREE_CODE (arg0) == FIXED_CST)
6582 return 1;
6584 if (TREE_CODE (arg1) == COMPLEX_CST)
6585 return 0;
6586 if (TREE_CODE (arg0) == COMPLEX_CST)
6587 return 1;
6589 if (TREE_CONSTANT (arg1))
6590 return 0;
6591 if (TREE_CONSTANT (arg0))
6592 return 1;
6594 if (optimize_function_for_size_p (cfun))
6595 return 0;
6597 if (reorder && flag_evaluation_order
6598 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6599 return 0;
6601 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6602 for commutative and comparison operators. Ensuring a canonical
6603 form allows the optimizers to find additional redundancies without
6604 having to explicitly check for both orderings. */
6605 if (TREE_CODE (arg0) == SSA_NAME
6606 && TREE_CODE (arg1) == SSA_NAME
6607 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6608 return 1;
6610 /* Put SSA_NAMEs last. */
6611 if (TREE_CODE (arg1) == SSA_NAME)
6612 return 0;
6613 if (TREE_CODE (arg0) == SSA_NAME)
6614 return 1;
6616 /* Put variables last. */
6617 if (DECL_P (arg1))
6618 return 0;
6619 if (DECL_P (arg0))
6620 return 1;
6622 return 0;
6625 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6626 ARG0 is extended to a wider type. */
6628 static tree
6629 fold_widened_comparison (location_t loc, enum tree_code code,
6630 tree type, tree arg0, tree arg1)
6632 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6633 tree arg1_unw;
6634 tree shorter_type, outer_type;
6635 tree min, max;
6636 bool above, below;
6638 if (arg0_unw == arg0)
6639 return NULL_TREE;
6640 shorter_type = TREE_TYPE (arg0_unw);
6642 #ifdef HAVE_canonicalize_funcptr_for_compare
6643 /* Disable this optimization if we're casting a function pointer
6644 type on targets that require function pointer canonicalization. */
6645 if (HAVE_canonicalize_funcptr_for_compare
6646 && TREE_CODE (shorter_type) == POINTER_TYPE
6647 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6648 return NULL_TREE;
6649 #endif
6651 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6652 return NULL_TREE;
6654 arg1_unw = get_unwidened (arg1, NULL_TREE);
6656 /* If possible, express the comparison in the shorter mode. */
6657 if ((code == EQ_EXPR || code == NE_EXPR
6658 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6659 && (TREE_TYPE (arg1_unw) == shorter_type
6660 || ((TYPE_PRECISION (shorter_type)
6661 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6662 && (TYPE_UNSIGNED (shorter_type)
6663 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6664 || (TREE_CODE (arg1_unw) == INTEGER_CST
6665 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6666 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6667 && int_fits_type_p (arg1_unw, shorter_type))))
6668 return fold_build2_loc (loc, code, type, arg0_unw,
6669 fold_convert_loc (loc, shorter_type, arg1_unw));
6671 if (TREE_CODE (arg1_unw) != INTEGER_CST
6672 || TREE_CODE (shorter_type) != INTEGER_TYPE
6673 || !int_fits_type_p (arg1_unw, shorter_type))
6674 return NULL_TREE;
6676 /* If we are comparing with an integer that does not fit into the range
6677 of the shorter type, the result is known. */
6678 outer_type = TREE_TYPE (arg1_unw);
6679 min = lower_bound_in_type (outer_type, shorter_type);
6680 max = upper_bound_in_type (outer_type, shorter_type);
6682 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6683 max, arg1_unw));
6684 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 arg1_unw, min));
6687 switch (code)
6689 case EQ_EXPR:
6690 if (above || below)
6691 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6692 break;
6694 case NE_EXPR:
6695 if (above || below)
6696 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6697 break;
6699 case LT_EXPR:
6700 case LE_EXPR:
6701 if (above)
6702 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6703 else if (below)
6704 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6706 case GT_EXPR:
6707 case GE_EXPR:
6708 if (above)
6709 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6710 else if (below)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6713 default:
6714 break;
6717 return NULL_TREE;
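/* Worked instance (illustrative): for signed char c, (int) c == 300
   folds to constant false (keeping c's side effects) because 300 lies
   above the upper bound 127 of the narrower type, and (int) c < 300
   folds to constant true for the same reason.  */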
6720 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6721 ARG0 just the signedness is changed. */
6723 static tree
6724 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6725 tree arg0, tree arg1)
6727 tree arg0_inner;
6728 tree inner_type, outer_type;
6730 if (!CONVERT_EXPR_P (arg0))
6731 return NULL_TREE;
6733 outer_type = TREE_TYPE (arg0);
6734 arg0_inner = TREE_OPERAND (arg0, 0);
6735 inner_type = TREE_TYPE (arg0_inner);
6737 #ifdef HAVE_canonicalize_funcptr_for_compare
6738 /* Disable this optimization if we're casting a function pointer
6739 type on targets that require function pointer canonicalization. */
6740 if (HAVE_canonicalize_funcptr_for_compare
6741 && TREE_CODE (inner_type) == POINTER_TYPE
6742 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6743 return NULL_TREE;
6744 #endif
6746 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6747 return NULL_TREE;
6749 if (TREE_CODE (arg1) != INTEGER_CST
6750 && !(CONVERT_EXPR_P (arg1)
6751 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6752 return NULL_TREE;
6754 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6755 && code != NE_EXPR
6756 && code != EQ_EXPR)
6757 return NULL_TREE;
6759 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6760 return NULL_TREE;
6762 if (TREE_CODE (arg1) == INTEGER_CST)
6763 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6764 0, TREE_OVERFLOW (arg1));
6765 else
6766 arg1 = fold_convert_loc (loc, inner_type, arg1);
6768 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
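/* For example (illustrative): with int x, the comparison
   (unsigned int) x == 5U folds back to x == 5; the precision is
   unchanged and equality is insensitive to the signedness change.  */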
6771 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6772 step of the array. Reconstructs s and delta in the case of s *
6773 delta being an integer constant (and thus already folded). ADDR is
6774 the address. OP1 is the multiplicative expression. If the
6775 function succeeds, the new address expression is returned.
6776 Otherwise NULL_TREE is returned. LOC is the location of the
6777 resulting expression. */
6779 static tree
6780 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6782 tree s, delta, step;
6783 tree ref = TREE_OPERAND (addr, 0), pref;
6784 tree ret, pos;
6785 tree itype;
6786 bool mdim = false;
6788 /* Strip the nops that might be added when converting op1 to sizetype. */
6789 STRIP_NOPS (op1);
6791 /* Canonicalize op1 into a possibly non-constant delta
6792 and an INTEGER_CST s. */
6793 if (TREE_CODE (op1) == MULT_EXPR)
6795 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6797 STRIP_NOPS (arg0);
6798 STRIP_NOPS (arg1);
6800 if (TREE_CODE (arg0) == INTEGER_CST)
6802 s = arg0;
6803 delta = arg1;
6805 else if (TREE_CODE (arg1) == INTEGER_CST)
6807 s = arg1;
6808 delta = arg0;
6810 else
6811 return NULL_TREE;
6813 else if (TREE_CODE (op1) == INTEGER_CST)
6815 delta = op1;
6816 s = NULL_TREE;
6818 else
6820 /* Pretend op1 is delta * 1. */
6821 delta = op1;
6822 s = integer_one_node;
6825 /* Handle &x.array the same as we would handle &x.array[0]. */
6826 if (TREE_CODE (ref) == COMPONENT_REF
6827 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6829 tree domain;
6831 /* Remember if this was a multi-dimensional array. */
6832 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6833 mdim = true;
6835 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6836 if (! domain)
6837 goto cont;
6838 itype = TREE_TYPE (domain);
6840 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6841 if (TREE_CODE (step) != INTEGER_CST)
6842 goto cont;
6844 if (s)
6846 if (! tree_int_cst_equal (step, s))
6847 goto cont;
6849 else
6851 /* Check whether delta is a multiple of step. */
6852 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6853 if (! tmp)
6854 goto cont;
6855 delta = tmp;
6858 /* Only fold here if we can verify we do not overflow one
6859 dimension of a multi-dimensional array. */
6860 if (mdim)
6862 tree tmp;
6864 if (!TYPE_MIN_VALUE (domain)
6865 || !TYPE_MAX_VALUE (domain)
6866 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6867 goto cont;
6869 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6870 fold_convert_loc (loc, itype,
6871 TYPE_MIN_VALUE (domain)),
6872 fold_convert_loc (loc, itype, delta));
6873 if (TREE_CODE (tmp) != INTEGER_CST
6874 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6875 goto cont;
6878 /* We found a suitable component reference. */
6880 pref = TREE_OPERAND (addr, 0);
6881 ret = copy_node (pref);
6882 SET_EXPR_LOCATION (ret, loc);
6884 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6885 fold_build2_loc
6886 (loc, PLUS_EXPR, itype,
6887 fold_convert_loc (loc, itype,
6888 TYPE_MIN_VALUE
6889 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6890 fold_convert_loc (loc, itype, delta)),
6891 NULL_TREE, NULL_TREE);
6892 return build_fold_addr_expr_loc (loc, ret);
6895 cont:
6897 for (;; ref = TREE_OPERAND (ref, 0))
6899 if (TREE_CODE (ref) == ARRAY_REF)
6901 tree domain;
6903 /* Remember if this was a multi-dimensional array. */
6904 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6905 mdim = true;
6907 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6908 if (! domain)
6909 continue;
6910 itype = TREE_TYPE (domain);
6912 step = array_ref_element_size (ref);
6913 if (TREE_CODE (step) != INTEGER_CST)
6914 continue;
6916 if (s)
6918 if (! tree_int_cst_equal (step, s))
6919 continue;
6921 else
6923 /* Check whether delta is a multiple of step. */
6924 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6925 if (! tmp)
6926 continue;
6927 delta = tmp;
6930 /* Only fold here if we can verify we do not overflow one
6931 dimension of a multi-dimensional array. */
6932 if (mdim)
6934 tree tmp;
6936 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6937 || !TYPE_MAX_VALUE (domain)
6938 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6939 continue;
6941 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6942 fold_convert_loc (loc, itype,
6943 TREE_OPERAND (ref, 1)),
6944 fold_convert_loc (loc, itype, delta));
6945 if (!tmp
6946 || TREE_CODE (tmp) != INTEGER_CST
6947 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6948 continue;
6951 break;
6953 else
6954 mdim = false;
6956 if (!handled_component_p (ref))
6957 return NULL_TREE;
6960 /* We found a suitable array reference. So copy everything up to it,
6961 and replace the index. */
6963 pref = TREE_OPERAND (addr, 0);
6964 ret = copy_node (pref);
6965 SET_EXPR_LOCATION (ret, loc);
6966 pos = ret;
6968 while (pref != ref)
6970 pref = TREE_OPERAND (pref, 0);
6971 TREE_OPERAND (pos, 0) = copy_node (pref);
6972 pos = TREE_OPERAND (pos, 0);
6975 TREE_OPERAND (pos, 1)
6976 = fold_build2_loc (loc, PLUS_EXPR, itype,
6977 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6978 fold_convert_loc (loc, itype, delta));
6979 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
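/* Worked instance (illustrative): for int a[N] with 4-byte elements,
   &a[i] p+ 4 * d is rewritten to &a[i + d], since the step s == 4
   matches the element size; for multi-dimensional arrays the extra
   checks above make sure the adjusted index stays in its domain.  */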
6983 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6984 means A >= Y && A != MAX, but in this case we know that
6985 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6987 static tree
6988 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6990 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6992 if (TREE_CODE (bound) == LT_EXPR)
6993 a = TREE_OPERAND (bound, 0);
6994 else if (TREE_CODE (bound) == GT_EXPR)
6995 a = TREE_OPERAND (bound, 1);
6996 else
6997 return NULL_TREE;
6999 typea = TREE_TYPE (a);
7000 if (!INTEGRAL_TYPE_P (typea)
7001 && !POINTER_TYPE_P (typea))
7002 return NULL_TREE;
7004 if (TREE_CODE (ineq) == LT_EXPR)
7006 a1 = TREE_OPERAND (ineq, 1);
7007 y = TREE_OPERAND (ineq, 0);
7009 else if (TREE_CODE (ineq) == GT_EXPR)
7011 a1 = TREE_OPERAND (ineq, 0);
7012 y = TREE_OPERAND (ineq, 1);
7014 else
7015 return NULL_TREE;
7017 if (TREE_TYPE (a1) != typea)
7018 return NULL_TREE;
7020 if (POINTER_TYPE_P (typea))
7022 /* Convert the pointer types into integer before taking the difference. */
7023 tree ta = fold_convert_loc (loc, ssizetype, a);
7024 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7025 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7027 else
7028 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7030 if (!diff || !integer_onep (diff))
7031 return NULL_TREE;
7033 return fold_build2_loc (loc, GE_EXPR, type, a, y);
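/* The key step (illustrative): BOUND gives a < x <= MAX, so a + 1
   cannot wrap, and INEQ == (a + 1 > y) is then exactly a >= y;
   e.g. a < n && a + 1 > i folds to a < n && a >= i.  */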
7036 /* Fold a sum or difference of at least one multiplication.
7037 Returns the folded tree or NULL if no simplification could be made. */
7039 static tree
7040 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7041 tree arg0, tree arg1)
7043 tree arg00, arg01, arg10, arg11;
7044 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7046 /* (A * C) +- (B * C) -> (A+-B) * C.
7047 (A * C) +- A -> A * (C+-1).
7048 We are most concerned about the case where C is a constant,
7049 but other combinations show up during loop reduction. Since
7050 it is not difficult, try all four possibilities. */
7052 if (TREE_CODE (arg0) == MULT_EXPR)
7054 arg00 = TREE_OPERAND (arg0, 0);
7055 arg01 = TREE_OPERAND (arg0, 1);
7057 else if (TREE_CODE (arg0) == INTEGER_CST)
7059 arg00 = build_one_cst (type);
7060 arg01 = arg0;
7062 else
7064 /* We cannot generate constant 1 for fract. */
7065 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7066 return NULL_TREE;
7067 arg00 = arg0;
7068 arg01 = build_one_cst (type);
7070 if (TREE_CODE (arg1) == MULT_EXPR)
7072 arg10 = TREE_OPERAND (arg1, 0);
7073 arg11 = TREE_OPERAND (arg1, 1);
7075 else if (TREE_CODE (arg1) == INTEGER_CST)
7077 arg10 = build_one_cst (type);
7078 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7079 the purpose of this canonicalization. */
7080 if (TREE_INT_CST_HIGH (arg1) == -1
7081 && negate_expr_p (arg1)
7082 && code == PLUS_EXPR)
7084 arg11 = negate_expr (arg1);
7085 code = MINUS_EXPR;
7087 else
7088 arg11 = arg1;
7090 else
7092 /* We cannot generate constant 1 for fract. */
7093 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7094 return NULL_TREE;
7095 arg10 = arg1;
7096 arg11 = build_one_cst (type);
7098 same = NULL_TREE;
7100 if (operand_equal_p (arg01, arg11, 0))
7101 same = arg01, alt0 = arg00, alt1 = arg10;
7102 else if (operand_equal_p (arg00, arg10, 0))
7103 same = arg00, alt0 = arg01, alt1 = arg11;
7104 else if (operand_equal_p (arg00, arg11, 0))
7105 same = arg00, alt0 = arg01, alt1 = arg10;
7106 else if (operand_equal_p (arg01, arg10, 0))
7107 same = arg01, alt0 = arg00, alt1 = arg11;
7109 /* No identical multiplicands; see if we can find a common
7110 power-of-two factor in non-power-of-two multiplies. This
7111 can help in multi-dimensional array access. */
7112 else if (host_integerp (arg01, 0)
7113 && host_integerp (arg11, 0))
7115 HOST_WIDE_INT int01, int11, tmp;
7116 bool swap = false;
7117 tree maybe_same;
7118 int01 = TREE_INT_CST_LOW (arg01);
7119 int11 = TREE_INT_CST_LOW (arg11);
7121 /* Move min of absolute values to int11. */
7122 if (absu_hwi (int01) < absu_hwi (int11))
7124 tmp = int01, int01 = int11, int11 = tmp;
7125 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7126 maybe_same = arg01;
7127 swap = true;
7129 else
7130 maybe_same = arg11;
7132 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7133 /* The remainder should not be a constant, otherwise we
7134 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7135 increased the number of multiplications necessary. */
7136 && TREE_CODE (arg10) != INTEGER_CST)
7138 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7139 build_int_cst (TREE_TYPE (arg00),
7140 int01 / int11));
7141 alt1 = arg10;
7142 same = maybe_same;
7143 if (swap)
7144 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7148 if (same)
7149 return fold_build2_loc (loc, MULT_EXPR, type,
7150 fold_build2_loc (loc, code, type,
7151 fold_convert_loc (loc, type, alt0),
7152 fold_convert_loc (loc, type, alt1)),
7153 fold_convert_loc (loc, type, same));
7155 return NULL_TREE;
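/* Worked instance (illustrative) of the power-of-two branch: in
   i * 12 + j * 4 the constants share the factor 4, so the sum is
   rewritten as (i * 3 + j) * 4, saving one multiplication; i * 4 + 2
   is deliberately left alone because its second addend is constant.  */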
7158 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7159 specified by EXPR into the buffer PTR of length LEN bytes.
7160 Return the number of bytes placed in the buffer, or zero
7161 upon failure. */
7163 static int
7164 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7166 tree type = TREE_TYPE (expr);
7167 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7168 int byte, offset, word, words;
7169 unsigned char value;
7171 if (total_bytes > len)
7172 return 0;
7173 words = total_bytes / UNITS_PER_WORD;
7175 for (byte = 0; byte < total_bytes; byte++)
7177 int bitpos = byte * BITS_PER_UNIT;
7178 if (bitpos < HOST_BITS_PER_WIDE_INT)
7179 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7180 else
7181 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7182 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7184 if (total_bytes > UNITS_PER_WORD)
7186 word = byte / UNITS_PER_WORD;
7187 if (WORDS_BIG_ENDIAN)
7188 word = (words - 1) - word;
7189 offset = word * UNITS_PER_WORD;
7190 if (BYTES_BIG_ENDIAN)
7191 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7192 else
7193 offset += byte % UNITS_PER_WORD;
7195 else
7196 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7197 ptr[offset] = value;
7199 return total_bytes;
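/* A standalone sketch (illustrative, assuming 8-bit bytes and a
   little-endian target) of the byte order the encoder above produces:
   the value 0x01020304 comes out lowest byte first.  */
static int
encode_int_example (void)
{
  unsigned int v = 0x01020304;
  unsigned char buf[4];
  int i;
  for (i = 0; i < 4; i++)
    buf[i] = (unsigned char) (v >> (i * 8));	/* 04 03 02 01.  */
  return buf[0] == 0x04 && buf[3] == 0x01;	/* 1 */
}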
7203 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7208 static int
7209 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7211 tree type = TREE_TYPE (expr);
7212 enum machine_mode mode = TYPE_MODE (type);
7213 int total_bytes = GET_MODE_SIZE (mode);
7214 FIXED_VALUE_TYPE value;
7215 tree i_value, i_type;
7217 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7218 return 0;
7220 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7222 if (NULL_TREE == i_type
7223 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7224 return 0;
7226 value = TREE_FIXED_CST (expr);
7227 i_value = double_int_to_tree (i_type, value.data);
7229 return native_encode_int (i_value, ptr, len);
7233 /* Subroutine of native_encode_expr. Encode the REAL_CST
7234 specified by EXPR into the buffer PTR of length LEN bytes.
7235 Return the number of bytes placed in the buffer, or zero
7236 upon failure. */
7238 static int
7239 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7241 tree type = TREE_TYPE (expr);
7242 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7243 int byte, offset, word, words, bitpos;
7244 unsigned char value;
7246 /* There are always 32 bits in each long, no matter the size of
7247 the host's long. We handle floating point representations with
7248 up to 192 bits. */
7249 long tmp[6];
7251 if (total_bytes > len)
7252 return 0;
7253 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7255 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7257 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7258 bitpos += BITS_PER_UNIT)
7260 byte = (bitpos / BITS_PER_UNIT) & 3;
7261 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7263 if (UNITS_PER_WORD < 4)
7265 word = byte / UNITS_PER_WORD;
7266 if (WORDS_BIG_ENDIAN)
7267 word = (words - 1) - word;
7268 offset = word * UNITS_PER_WORD;
7269 if (BYTES_BIG_ENDIAN)
7270 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7271 else
7272 offset += byte % UNITS_PER_WORD;
7274 else
7275 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7276 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7278 return total_bytes;
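/* Illustrative example: real_to_target delivers the image in 32-bit
   chunks, so for the single-precision constant 1.0f (IEEE bit pattern
   0x3f800000) tmp[0] == 0x3f800000, and on a little-endian target with
   UNITS_PER_WORD >= 4 the loop above emits

     ptr[] = { 0x00, 0x00, 0x80, 0x3f };

   the target memory image of that float.  */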
7281 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7282 specified by EXPR into the buffer PTR of length LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero
7284 upon failure. */
7286 static int
7287 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7289 int rsize, isize;
7290 tree part;
7292 part = TREE_REALPART (expr);
7293 rsize = native_encode_expr (part, ptr, len);
7294 if (rsize == 0)
7295 return 0;
7296 part = TREE_IMAGPART (expr);
7297 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7298 if (isize != rsize)
7299 return 0;
7300 return rsize + isize;
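/* Layout sketch (illustrative): the two parts are simply concatenated,
   real part first.  For a _Complex float constant 1.0f + 2.0fi on a
   little-endian IEEE target the buffer becomes

     ptr[0..3] = 00 00 80 3f   (1.0f)
     ptr[4..7] = 00 00 00 40   (2.0f)

   and the isize != rsize check above rejects any asymmetric encoding.  */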
7304 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7305 specified by EXPR into the buffer PTR of length LEN bytes.
7306 Return the number of bytes placed in the buffer, or zero
7307 upon failure. */
7309 static int
7310 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7312 unsigned i, count;
7313 int size, offset;
7314 tree itype, elem;
7316 offset = 0;
7317 count = VECTOR_CST_NELTS (expr);
7318 itype = TREE_TYPE (TREE_TYPE (expr));
7319 size = GET_MODE_SIZE (TYPE_MODE (itype));
7320 for (i = 0; i < count; i++)
7322 elem = VECTOR_CST_ELT (expr, i);
7323 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7324 return 0;
7325 offset += size;
7327 return offset;
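/* Illustrative example: for a V4SI constant { 1, 2, 3, 4 } each element
   mode is 4 bytes wide, so element I is encoded at ptr + 4 * I and the
   function returns 16.  A short write for any element (native_encode_expr
   returning anything other than SIZE) aborts the whole encoding.  */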
7331 /* Subroutine of native_encode_expr. Encode the STRING_CST
7332 specified by EXPR into the buffer PTR of length LEN bytes.
7333 Return the number of bytes placed in the buffer, or zero
7334 upon failure. */
7336 static int
7337 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7339 tree type = TREE_TYPE (expr);
7340 HOST_WIDE_INT total_bytes;
7342 if (TREE_CODE (type) != ARRAY_TYPE
7343 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7344 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7345 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7346 return 0;
7347 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7348 if (total_bytes > len)
7349 return 0;
7350 if (TREE_STRING_LENGTH (expr) < total_bytes)
7352 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7353 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7354 total_bytes - TREE_STRING_LENGTH (expr));
7356 else
7357 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7358 return total_bytes;
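/* Illustrative example: encoding the STRING_CST "hi" (TREE_STRING_LENGTH
   of 3, counting the terminating NUL) as the initializer of a char[8]
   copies 'h', 'i', '\0' and then zero-fills the remaining five bytes, so
   the trailing array elements come out zero as required.  */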
7362 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7363 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7364 buffer PTR of length LEN bytes. Return the number of bytes
7365 placed in the buffer, or zero upon failure. */
7367 int
7368 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7370 switch (TREE_CODE (expr))
7372 case INTEGER_CST:
7373 return native_encode_int (expr, ptr, len);
7375 case REAL_CST:
7376 return native_encode_real (expr, ptr, len);
7378 case FIXED_CST:
7379 return native_encode_fixed (expr, ptr, len);
7381 case COMPLEX_CST:
7382 return native_encode_complex (expr, ptr, len);
7384 case VECTOR_CST:
7385 return native_encode_vector (expr, ptr, len);
7387 case STRING_CST:
7388 return native_encode_string (expr, ptr, len);
7390 default:
7391 return 0;
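/* Usage sketch (illustrative, mirroring fold_view_convert_expr below):
   a round trip through the byte image reinterprets a constant as a
   different type:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       tem = native_interpret_expr (other_type, buf, len);

   Both halves degrade gracefully: an unsupported tree code encodes to
   zero bytes and an unsupported type interprets to NULL_TREE.  */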
7396 /* Subroutine of native_interpret_expr. Interpret the contents of
7397 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7398 If the buffer cannot be interpreted, return NULL_TREE. */
7400 static tree
7401 native_interpret_int (tree type, const unsigned char *ptr, int len)
7403 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7404 double_int result;
7406 if (total_bytes > len
7407 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7408 return NULL_TREE;
7410 result = double_int::from_buffer (ptr, total_bytes);
7412 return double_int_to_tree (type, result);
7416 /* Subroutine of native_interpret_expr. Interpret the contents of
7417 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7418 If the buffer cannot be interpreted, return NULL_TREE. */
7420 static tree
7421 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7423 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7424 double_int result;
7425 FIXED_VALUE_TYPE fixed_value;
7427 if (total_bytes > len
7428 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7429 return NULL_TREE;
7431 result = double_int::from_buffer (ptr, total_bytes);
7432 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7434 return build_fixed (type, fixed_value);
7438 /* Subroutine of native_interpret_expr. Interpret the contents of
7439 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7440 If the buffer cannot be interpreted, return NULL_TREE. */
7442 static tree
7443 native_interpret_real (tree type, const unsigned char *ptr, int len)
7445 enum machine_mode mode = TYPE_MODE (type);
7446 int total_bytes = GET_MODE_SIZE (mode);
7447 int byte, offset, word, words, bitpos;
7448 unsigned char value;
7449 /* There are always 32 bits in each long, no matter the size of
7450 the host's long. We handle floating point representations with
7451 up to 192 bits. */
7452 REAL_VALUE_TYPE r;
7453 long tmp[6];
7455 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7456 if (total_bytes > len || total_bytes > 24)
7457 return NULL_TREE;
7458 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7460 memset (tmp, 0, sizeof (tmp));
7461 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7462 bitpos += BITS_PER_UNIT)
7464 byte = (bitpos / BITS_PER_UNIT) & 3;
7465 if (UNITS_PER_WORD < 4)
7467 word = byte / UNITS_PER_WORD;
7468 if (WORDS_BIG_ENDIAN)
7469 word = (words - 1) - word;
7470 offset = word * UNITS_PER_WORD;
7471 if (BYTES_BIG_ENDIAN)
7472 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7473 else
7474 offset += byte % UNITS_PER_WORD;
7476 else
7477 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7478 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7480 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7483 real_from_target (&r, tmp, mode);
7484 return build_real (type, r);
7488 /* Subroutine of native_interpret_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7490 If the buffer cannot be interpreted, return NULL_TREE. */
7492 static tree
7493 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7495 tree etype, rpart, ipart;
7496 int size;
7498 etype = TREE_TYPE (type);
7499 size = GET_MODE_SIZE (TYPE_MODE (etype));
7500 if (size * 2 > len)
7501 return NULL_TREE;
7502 rpart = native_interpret_expr (etype, ptr, size);
7503 if (!rpart)
7504 return NULL_TREE;
7505 ipart = native_interpret_expr (etype, ptr+size, size);
7506 if (!ipart)
7507 return NULL_TREE;
7508 return build_complex (type, rpart, ipart);
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7516 static tree
7517 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7519 tree etype, elem;
7520 int i, size, count;
7521 tree *elements;
7523 etype = TREE_TYPE (type);
7524 size = GET_MODE_SIZE (TYPE_MODE (etype));
7525 count = TYPE_VECTOR_SUBPARTS (type);
7526 if (size * count > len)
7527 return NULL_TREE;
7529 elements = XALLOCAVEC (tree, count);
7530 for (i = count - 1; i >= 0; i--)
7532 elem = native_interpret_expr (etype, ptr+(i*size), size);
7533 if (!elem)
7534 return NULL_TREE;
7535 elements[i] = elem;
7537 return build_vector (type, elements);
7541 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7542 the buffer PTR of length LEN as a constant of type TYPE. For
7543 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7544 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7545 return NULL_TREE. */
7547 tree
7548 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7550 switch (TREE_CODE (type))
7552 case INTEGER_TYPE:
7553 case ENUMERAL_TYPE:
7554 case BOOLEAN_TYPE:
7555 case POINTER_TYPE:
7556 case REFERENCE_TYPE:
7557 return native_interpret_int (type, ptr, len);
7559 case REAL_TYPE:
7560 return native_interpret_real (type, ptr, len);
7562 case FIXED_POINT_TYPE:
7563 return native_interpret_fixed (type, ptr, len);
7565 case COMPLEX_TYPE:
7566 return native_interpret_complex (type, ptr, len);
7568 case VECTOR_TYPE:
7569 return native_interpret_vector (type, ptr, len);
7571 default:
7572 return NULL_TREE;
7576 /* Returns true if we can interpret the contents of a native encoding
7577 as TYPE. */
7579 static bool
7580 can_native_interpret_type_p (tree type)
7582 switch (TREE_CODE (type))
7584 case INTEGER_TYPE:
7585 case ENUMERAL_TYPE:
7586 case BOOLEAN_TYPE:
7587 case POINTER_TYPE:
7588 case REFERENCE_TYPE:
7589 case FIXED_POINT_TYPE:
7590 case REAL_TYPE:
7591 case COMPLEX_TYPE:
7592 case VECTOR_TYPE:
7593 return true;
7594 default:
7595 return false;
7599 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7600 TYPE at compile-time. If we're unable to perform the conversion
7601 return NULL_TREE. */
7603 static tree
7604 fold_view_convert_expr (tree type, tree expr)
7606 /* We support up to 512-bit values (for V8DFmode). */
7607 unsigned char buffer[64];
7608 int len;
7610 /* Check that the host and target are sane. */
7611 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7612 return NULL_TREE;
7614 len = native_encode_expr (expr, buffer, sizeof (buffer));
7615 if (len == 0)
7616 return NULL_TREE;
7618 return native_interpret_expr (type, buffer, len);
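/* Example (illustrative): with float and int both 4 bytes wide,
   VIEW_CONVERT_EXPR<int>(1.0f) folds here to the INTEGER_CST 0x3f800000,
   the float's target byte image re-read as an int.  The 64-byte buffer
   bounds what can be folded; anything larger fails native_encode_expr
   and returns NULL_TREE through the len == 0 check.  */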
7621 /* Build an expression for the address of T. Folds away INDIRECT_REF
7622 to avoid confusing the gimplify process. */
7624 tree
7625 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7627 /* The size of the object is not relevant when talking about its address. */
7628 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7629 t = TREE_OPERAND (t, 0);
7631 if (TREE_CODE (t) == INDIRECT_REF)
7633 t = TREE_OPERAND (t, 0);
7635 if (TREE_TYPE (t) != ptrtype)
7636 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7638 else if (TREE_CODE (t) == MEM_REF
7639 && integer_zerop (TREE_OPERAND (t, 1)))
7640 return TREE_OPERAND (t, 0);
7641 else if (TREE_CODE (t) == MEM_REF
7642 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7643 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7644 TREE_OPERAND (t, 0),
7645 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7646 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7648 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7650 if (TREE_TYPE (t) != ptrtype)
7651 t = fold_convert_loc (loc, ptrtype, t);
7653 else
7654 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7656 return t;
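/* Examples (illustrative) of the cases above: &*p and &MEM_REF[p, 0]
   both fold back to plain p (the INDIRECT_REF case adds a NOP_EXPR when
   the pointer type differs), and &VIEW_CONVERT_EXPR<T>(x) becomes a
   conversion of &x, so the gimplifier never sees an ADDR_EXPR wrapped
   around what is really just a pointer in disguise.  */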
7659 /* Build an expression for the address of T. */
7661 tree
7662 build_fold_addr_expr_loc (location_t loc, tree t)
7664 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7666 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7669 static bool vec_cst_ctor_to_array (tree, tree *);
7671 /* Fold a unary expression of code CODE and type TYPE with operand
7672 OP0. Return the folded expression if folding is successful.
7673 Otherwise, return NULL_TREE. */
7675 tree
7676 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7678 tree tem;
7679 tree arg0;
7680 enum tree_code_class kind = TREE_CODE_CLASS (code);
7682 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7683 && TREE_CODE_LENGTH (code) == 1);
7685 arg0 = op0;
7686 if (arg0)
7688 if (CONVERT_EXPR_CODE_P (code)
7689 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7691 /* Don't use STRIP_NOPS, because signedness of argument type
7692 matters. */
7693 STRIP_SIGN_NOPS (arg0);
7695 else
7697 /* Strip any conversions that don't change the mode. This
7698 is safe for every expression, except for a comparison
7699 expression because its signedness is derived from its
7700 operands.
7702 Note that this is done as an internal manipulation within
7703 the constant folder, in order to find the simplest
7704 representation of the arguments so that their form can be
7705 studied. In any case, the appropriate type conversions
7706 should be put back in the tree that will get out of the
7707 constant folder. */
7708 STRIP_NOPS (arg0);
7712 if (TREE_CODE_CLASS (code) == tcc_unary)
7714 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7715 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7716 fold_build1_loc (loc, code, type,
7717 fold_convert_loc (loc, TREE_TYPE (op0),
7718 TREE_OPERAND (arg0, 1))));
7719 else if (TREE_CODE (arg0) == COND_EXPR)
7721 tree arg01 = TREE_OPERAND (arg0, 1);
7722 tree arg02 = TREE_OPERAND (arg0, 2);
7723 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7724 arg01 = fold_build1_loc (loc, code, type,
7725 fold_convert_loc (loc,
7726 TREE_TYPE (op0), arg01));
7727 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7728 arg02 = fold_build1_loc (loc, code, type,
7729 fold_convert_loc (loc,
7730 TREE_TYPE (op0), arg02));
7731 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7732 arg01, arg02);
7734 /* If this was a conversion, and all we did was move it
7735 inside the COND_EXPR, bring it back out. But leave it if
7736 it is a conversion from integer to integer and the
7737 result precision is no wider than a word since such a
7738 conversion is cheap and may be optimized away by combine,
7739 while it couldn't if it were outside the COND_EXPR. Then return
7740 so we don't get into an infinite recursion loop taking the
7741 conversion out and then back in. */
7743 if ((CONVERT_EXPR_CODE_P (code)
7744 || code == NON_LVALUE_EXPR)
7745 && TREE_CODE (tem) == COND_EXPR
7746 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7747 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7748 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7749 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7750 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7751 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7752 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7753 && (INTEGRAL_TYPE_P
7754 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7755 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7756 || flag_syntax_only))
7757 tem = build1_loc (loc, code, type,
7758 build3 (COND_EXPR,
7759 TREE_TYPE (TREE_OPERAND
7760 (TREE_OPERAND (tem, 1), 0)),
7761 TREE_OPERAND (tem, 0),
7762 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7763 TREE_OPERAND (TREE_OPERAND (tem, 2),
7764 0)));
7765 return tem;
7769 switch (code)
7771 case PAREN_EXPR:
7772 /* Re-association barriers around constants and other re-association
7773 barriers can be removed. */
7774 if (CONSTANT_CLASS_P (op0)
7775 || TREE_CODE (op0) == PAREN_EXPR)
7776 return fold_convert_loc (loc, type, op0);
7777 return NULL_TREE;
7779 CASE_CONVERT:
7780 case FLOAT_EXPR:
7781 case FIX_TRUNC_EXPR:
7782 if (TREE_TYPE (op0) == type)
7783 return op0;
7785 if (COMPARISON_CLASS_P (op0))
7787 /* If we have (type) (a CMP b) and type is an integral type, return
7788 new expression involving the new type. Canonicalize
7789 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7790 non-integral type.
7791 Do not fold the result as that would not simplify further, also
7792 folding again results in recursions. */
7793 if (TREE_CODE (type) == BOOLEAN_TYPE)
7794 return build2_loc (loc, TREE_CODE (op0), type,
7795 TREE_OPERAND (op0, 0),
7796 TREE_OPERAND (op0, 1));
7797 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7798 && TREE_CODE (type) != VECTOR_TYPE)
7799 return build3_loc (loc, COND_EXPR, type, op0,
7800 constant_boolean_node (true, type),
7801 constant_boolean_node (false, type));
7804 /* Handle cases of two conversions in a row. */
7805 if (CONVERT_EXPR_P (op0))
7807 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7808 tree inter_type = TREE_TYPE (op0);
7809 int inside_int = INTEGRAL_TYPE_P (inside_type);
7810 int inside_ptr = POINTER_TYPE_P (inside_type);
7811 int inside_float = FLOAT_TYPE_P (inside_type);
7812 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7813 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7814 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7815 int inter_int = INTEGRAL_TYPE_P (inter_type);
7816 int inter_ptr = POINTER_TYPE_P (inter_type);
7817 int inter_float = FLOAT_TYPE_P (inter_type);
7818 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7819 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7820 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7821 int final_int = INTEGRAL_TYPE_P (type);
7822 int final_ptr = POINTER_TYPE_P (type);
7823 int final_float = FLOAT_TYPE_P (type);
7824 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7825 unsigned int final_prec = TYPE_PRECISION (type);
7826 int final_unsignedp = TYPE_UNSIGNED (type);
7828 /* In addition to the cases of two conversions in a row
7829 handled below, if we are converting something to its own
7830 type via an object of identical or wider precision, neither
7831 conversion is needed. */
7832 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7833 && (((inter_int || inter_ptr) && final_int)
7834 || (inter_float && final_float))
7835 && inter_prec >= final_prec)
7836 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7838 /* Likewise, if the intermediate and initial types are either both
7839 float or both integer, we don't need the middle conversion if the
7840 former is wider than the latter and doesn't change the signedness
7841 (for integers). Avoid this if the final type is a pointer since
7842 then we sometimes need the middle conversion. Likewise if the
7843 final type has a precision not equal to the size of its mode. */
7844 if (((inter_int && inside_int)
7845 || (inter_float && inside_float)
7846 || (inter_vec && inside_vec))
7847 && inter_prec >= inside_prec
7848 && (inter_float || inter_vec
7849 || inter_unsignedp == inside_unsignedp)
7850 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7851 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7852 && ! final_ptr
7853 && (! final_vec || inter_prec == inside_prec))
7854 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7856 /* If we have a sign-extension of a zero-extended value, we can
7857 replace that by a single zero-extension. Likewise if the
7858 final conversion does not change precision we can drop the
7859 intermediate conversion. */
7860 if (inside_int && inter_int && final_int
7861 && ((inside_prec < inter_prec && inter_prec < final_prec
7862 && inside_unsignedp && !inter_unsignedp)
7863 || final_prec == inter_prec))
7864 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7866 /* Two conversions in a row are not needed unless:
7867 - some conversion is floating-point (overstrict for now), or
7868 - some conversion is a vector (overstrict for now), or
7869 - the intermediate type is narrower than both initial and
7870 final, or
7871 - the intermediate type and innermost type differ in signedness,
7872 and the outermost type is wider than the intermediate, or
7873 - the initial type is a pointer type and the precisions of the
7874 intermediate and final types differ, or
7875 - the final type is a pointer type and the precisions of the
7876 initial and intermediate types differ. */
7877 if (! inside_float && ! inter_float && ! final_float
7878 && ! inside_vec && ! inter_vec && ! final_vec
7879 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7880 && ! (inside_int && inter_int
7881 && inter_unsignedp != inside_unsignedp
7882 && inter_prec < final_prec)
7883 && ((inter_unsignedp && inter_prec > inside_prec)
7884 == (final_unsignedp && final_prec > inter_prec))
7885 && ! (inside_ptr && inter_prec != final_prec)
7886 && ! (final_ptr && inside_prec != inter_prec)
7887 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7888 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7889 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
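/* Worked example (illustrative) of the sign-of-a-zero-extension rule
   above: for unsigned char C, the expression (int) (short) C first
   zero-extends C to 16 bits and then sign-extends to 32; the
   intermediate sign bit can never be set, so this folds to the single
   conversion (int) C.  */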
7892 /* Handle (T *)&A.B.C for A being of type T and B and C
7893 living at offset zero. This occurs frequently in
7894 C++ upcasting and then accessing the base. */
7895 if (TREE_CODE (op0) == ADDR_EXPR
7896 && POINTER_TYPE_P (type)
7897 && handled_component_p (TREE_OPERAND (op0, 0)))
7899 HOST_WIDE_INT bitsize, bitpos;
7900 tree offset;
7901 enum machine_mode mode;
7902 int unsignedp, volatilep;
7903 tree base = TREE_OPERAND (op0, 0);
7904 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7905 &mode, &unsignedp, &volatilep, false);
7906 /* If the reference was to a (constant) zero offset, we can use
7907 the address of the base if it has the same base type
7908 as the result type and the pointer type is unqualified. */
7909 if (! offset && bitpos == 0
7910 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7911 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7912 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7913 return fold_convert_loc (loc, type,
7914 build_fold_addr_expr_loc (loc, base));
7917 if (TREE_CODE (op0) == MODIFY_EXPR
7918 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7919 /* Detect assigning a bitfield. */
7920 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7921 && DECL_BIT_FIELD
7922 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7924 /* Don't leave an assignment inside a conversion
7925 unless assigning a bitfield. */
7926 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7927 /* First do the assignment, then return converted constant. */
7928 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7929 TREE_NO_WARNING (tem) = 1;
7930 TREE_USED (tem) = 1;
7931 return tem;
7934 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7935 constants (if x has signed type, the sign bit cannot be set
7936 in c). This folds extension into the BIT_AND_EXPR.
7937 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7938 very likely don't have maximal range for their precision and this
7939 transformation effectively doesn't preserve non-maximal ranges. */
7940 if (TREE_CODE (type) == INTEGER_TYPE
7941 && TREE_CODE (op0) == BIT_AND_EXPR
7942 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7944 tree and_expr = op0;
7945 tree and0 = TREE_OPERAND (and_expr, 0);
7946 tree and1 = TREE_OPERAND (and_expr, 1);
7947 int change = 0;
7949 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7950 || (TYPE_PRECISION (type)
7951 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7952 change = 1;
7953 else if (TYPE_PRECISION (TREE_TYPE (and1))
7954 <= HOST_BITS_PER_WIDE_INT
7955 && host_integerp (and1, 1))
7957 unsigned HOST_WIDE_INT cst;
7959 cst = tree_low_cst (and1, 1);
7960 cst &= (HOST_WIDE_INT) -1
7961 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7962 change = (cst == 0);
7963 #ifdef LOAD_EXTEND_OP
7964 if (change
7965 && !flag_syntax_only
7966 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7967 == ZERO_EXTEND))
7969 tree uns = unsigned_type_for (TREE_TYPE (and0));
7970 and0 = fold_convert_loc (loc, uns, and0);
7971 and1 = fold_convert_loc (loc, uns, and1);
7973 #endif
7975 if (change)
7977 tem = force_fit_type_double (type, tree_to_double_int (and1),
7978 0, TREE_OVERFLOW (and1));
7979 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7980 fold_convert_loc (loc, type, and0), tem);
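/* Worked example (illustrative): for signed char X, the constant in
   (int) (X & 0x7f) has the sign bit of X's type clear (the cst
   computation above yields zero), so the conversion is pushed into the
   BIT_AND_EXPR and the result becomes (int) X & 0x7f.  */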
7984 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7985 when one of the new casts will fold away. Conservatively we assume
7986 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7987 if (POINTER_TYPE_P (type)
7988 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7989 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7990 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7991 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7992 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7994 tree arg00 = TREE_OPERAND (arg0, 0);
7995 tree arg01 = TREE_OPERAND (arg0, 1);
7997 return fold_build_pointer_plus_loc
7998 (loc, fold_convert_loc (loc, type, arg00), arg01);
8001 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8002 of the same precision, and X is an integer type not narrower than
8003 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8004 if (INTEGRAL_TYPE_P (type)
8005 && TREE_CODE (op0) == BIT_NOT_EXPR
8006 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8007 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8008 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8010 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8011 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8012 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8013 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8014 fold_convert_loc (loc, type, tem));
8017 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8018 type of X and Y (integer types only). */
8019 if (INTEGRAL_TYPE_P (type)
8020 && TREE_CODE (op0) == MULT_EXPR
8021 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8022 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8024 /* Be careful not to introduce new overflows. */
8025 tree mult_type;
8026 if (TYPE_OVERFLOW_WRAPS (type))
8027 mult_type = type;
8028 else
8029 mult_type = unsigned_type_for (type);
8031 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8033 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8034 fold_convert_loc (loc, mult_type,
8035 TREE_OPERAND (op0, 0)),
8036 fold_convert_loc (loc, mult_type,
8037 TREE_OPERAND (op0, 1)));
8038 return fold_convert_loc (loc, type, tem);
8042 tem = fold_convert_const (code, type, op0);
8043 return tem ? tem : NULL_TREE;
8045 case ADDR_SPACE_CONVERT_EXPR:
8046 if (integer_zerop (arg0))
8047 return fold_convert_const (code, type, arg0);
8048 return NULL_TREE;
8050 case FIXED_CONVERT_EXPR:
8051 tem = fold_convert_const (code, type, arg0);
8052 return tem ? tem : NULL_TREE;
8054 case VIEW_CONVERT_EXPR:
8055 if (TREE_TYPE (op0) == type)
8056 return op0;
8057 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8058 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8059 type, TREE_OPERAND (op0, 0));
8060 if (TREE_CODE (op0) == MEM_REF)
8061 return fold_build2_loc (loc, MEM_REF, type,
8062 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8064 /* For integral conversions with the same precision or pointer
8065 conversions use a NOP_EXPR instead. */
8066 if ((INTEGRAL_TYPE_P (type)
8067 || POINTER_TYPE_P (type))
8068 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8069 || POINTER_TYPE_P (TREE_TYPE (op0)))
8070 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8071 return fold_convert_loc (loc, type, op0);
8073 /* Strip inner integral conversions that do not change the precision. */
8074 if (CONVERT_EXPR_P (op0)
8075 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8076 || POINTER_TYPE_P (TREE_TYPE (op0)))
8077 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8078 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8079 && (TYPE_PRECISION (TREE_TYPE (op0))
8080 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8081 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8082 type, TREE_OPERAND (op0, 0));
8084 return fold_view_convert_expr (type, op0);
8086 case NEGATE_EXPR:
8087 tem = fold_negate_expr (loc, arg0);
8088 if (tem)
8089 return fold_convert_loc (loc, type, tem);
8090 return NULL_TREE;
8092 case ABS_EXPR:
8093 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8094 return fold_abs_const (arg0, type);
8095 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8096 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8097 /* Convert fabs((double)float) into (double)fabsf(float). */
8098 else if (TREE_CODE (arg0) == NOP_EXPR
8099 && TREE_CODE (type) == REAL_TYPE)
8101 tree targ0 = strip_float_extensions (arg0);
8102 if (targ0 != arg0)
8103 return fold_convert_loc (loc, type,
8104 fold_build1_loc (loc, ABS_EXPR,
8105 TREE_TYPE (targ0),
8106 targ0));
8108 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8109 else if (TREE_CODE (arg0) == ABS_EXPR)
8110 return arg0;
8111 else if (tree_expr_nonnegative_p (arg0))
8112 return arg0;
8114 /* Strip sign ops from argument. */
8115 if (TREE_CODE (type) == REAL_TYPE)
8117 tem = fold_strip_sign_ops (arg0);
8118 if (tem)
8119 return fold_build1_loc (loc, ABS_EXPR, type,
8120 fold_convert_loc (loc, type, tem));
8122 return NULL_TREE;
8124 case CONJ_EXPR:
8125 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8126 return fold_convert_loc (loc, type, arg0);
8127 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8129 tree itype = TREE_TYPE (type);
8130 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8131 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8132 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8133 negate_expr (ipart));
8135 if (TREE_CODE (arg0) == COMPLEX_CST)
8137 tree itype = TREE_TYPE (type);
8138 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8139 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8140 return build_complex (type, rpart, negate_expr (ipart));
8142 if (TREE_CODE (arg0) == CONJ_EXPR)
8143 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8144 return NULL_TREE;
8146 case BIT_NOT_EXPR:
8147 if (TREE_CODE (arg0) == INTEGER_CST)
8148 return fold_not_const (arg0, type);
8149 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8150 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8151 /* Convert ~ (-A) to A - 1. */
8152 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8153 return fold_build2_loc (loc, MINUS_EXPR, type,
8154 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8155 build_int_cst (type, 1));
8156 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8157 else if (INTEGRAL_TYPE_P (type)
8158 && ((TREE_CODE (arg0) == MINUS_EXPR
8159 && integer_onep (TREE_OPERAND (arg0, 1)))
8160 || (TREE_CODE (arg0) == PLUS_EXPR
8161 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8162 return fold_build1_loc (loc, NEGATE_EXPR, type,
8163 fold_convert_loc (loc, type,
8164 TREE_OPERAND (arg0, 0)));
8165 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8166 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8167 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8168 fold_convert_loc (loc, type,
8169 TREE_OPERAND (arg0, 0)))))
8170 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8171 fold_convert_loc (loc, type,
8172 TREE_OPERAND (arg0, 1)));
8173 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8174 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8175 fold_convert_loc (loc, type,
8176 TREE_OPERAND (arg0, 1)))))
8177 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8178 fold_convert_loc (loc, type,
8179 TREE_OPERAND (arg0, 0)), tem);
8180 /* Perform BIT_NOT_EXPR on each element individually. */
8181 else if (TREE_CODE (arg0) == VECTOR_CST)
8183 tree *elements;
8184 tree elem;
8185 unsigned count = VECTOR_CST_NELTS (arg0), i;
8187 elements = XALLOCAVEC (tree, count);
8188 for (i = 0; i < count; i++)
8190 elem = VECTOR_CST_ELT (arg0, i);
8191 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8192 if (elem == NULL_TREE)
8193 break;
8194 elements[i] = elem;
8196 if (i == count)
8197 return build_vector (type, elements);
8200 return NULL_TREE;
8202 case TRUTH_NOT_EXPR:
8203 /* The argument to invert_truthvalue must have Boolean type. */
8204 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8205 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8207 /* Note that the operand of this must be an int
8208 and its values must be 0 or 1.
8209 ("true" is a fixed value perhaps depending on the language,
8210 but we don't handle values other than 1 correctly yet.) */
8211 tem = fold_truth_not_expr (loc, arg0);
8212 if (!tem)
8213 return NULL_TREE;
8214 return fold_convert_loc (loc, type, tem);
8216 case REALPART_EXPR:
8217 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8218 return fold_convert_loc (loc, type, arg0);
8219 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8220 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8221 TREE_OPERAND (arg0, 1));
8222 if (TREE_CODE (arg0) == COMPLEX_CST)
8223 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8224 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8226 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8227 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8228 fold_build1_loc (loc, REALPART_EXPR, itype,
8229 TREE_OPERAND (arg0, 0)),
8230 fold_build1_loc (loc, REALPART_EXPR, itype,
8231 TREE_OPERAND (arg0, 1)));
8232 return fold_convert_loc (loc, type, tem);
8234 if (TREE_CODE (arg0) == CONJ_EXPR)
8236 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8237 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8238 TREE_OPERAND (arg0, 0));
8239 return fold_convert_loc (loc, type, tem);
8241 if (TREE_CODE (arg0) == CALL_EXPR)
8243 tree fn = get_callee_fndecl (arg0);
8244 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8245 switch (DECL_FUNCTION_CODE (fn))
8247 CASE_FLT_FN (BUILT_IN_CEXPI):
8248 fn = mathfn_built_in (type, BUILT_IN_COS);
8249 if (fn)
8250 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8251 break;
8253 default:
8254 break;
8257 return NULL_TREE;
8259 case IMAGPART_EXPR:
8260 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8261 return build_zero_cst (type);
8262 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8263 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8264 TREE_OPERAND (arg0, 0));
8265 if (TREE_CODE (arg0) == COMPLEX_CST)
8266 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8267 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8269 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8270 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8271 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8272 TREE_OPERAND (arg0, 0)),
8273 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8274 TREE_OPERAND (arg0, 1)));
8275 return fold_convert_loc (loc, type, tem);
8277 if (TREE_CODE (arg0) == CONJ_EXPR)
8279 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8280 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8281 return fold_convert_loc (loc, type, negate_expr (tem));
8283 if (TREE_CODE (arg0) == CALL_EXPR)
8285 tree fn = get_callee_fndecl (arg0);
8286 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8287 switch (DECL_FUNCTION_CODE (fn))
8289 CASE_FLT_FN (BUILT_IN_CEXPI):
8290 fn = mathfn_built_in (type, BUILT_IN_SIN);
8291 if (fn)
8292 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8293 break;
8295 default:
8296 break;
8299 return NULL_TREE;
8301 case INDIRECT_REF:
8302 /* Fold *&X to X if X is an lvalue. */
8303 if (TREE_CODE (op0) == ADDR_EXPR)
8305 tree op00 = TREE_OPERAND (op0, 0);
8306 if ((TREE_CODE (op00) == VAR_DECL
8307 || TREE_CODE (op00) == PARM_DECL
8308 || TREE_CODE (op00) == RESULT_DECL)
8309 && !TREE_READONLY (op00))
8310 return op00;
8312 return NULL_TREE;
8314 case VEC_UNPACK_LO_EXPR:
8315 case VEC_UNPACK_HI_EXPR:
8316 case VEC_UNPACK_FLOAT_LO_EXPR:
8317 case VEC_UNPACK_FLOAT_HI_EXPR:
8319 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8320 tree *elts;
8321 enum tree_code subcode;
8323 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8324 if (TREE_CODE (arg0) != VECTOR_CST)
8325 return NULL_TREE;
8327 elts = XALLOCAVEC (tree, nelts * 2);
8328 if (!vec_cst_ctor_to_array (arg0, elts))
8329 return NULL_TREE;
8331 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8332 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8333 elts += nelts;
8335 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8336 subcode = NOP_EXPR;
8337 else
8338 subcode = FLOAT_EXPR;
8340 for (i = 0; i < nelts; i++)
8342 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8343 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8344 return NULL_TREE;
8347 return build_vector (type, elts);
8350 case REDUC_MIN_EXPR:
8351 case REDUC_MAX_EXPR:
8352 case REDUC_PLUS_EXPR:
8354 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8355 tree *elts;
8356 enum tree_code subcode;
8358 if (TREE_CODE (op0) != VECTOR_CST)
8359 return NULL_TREE;
8361 elts = XALLOCAVEC (tree, nelts);
8362 if (!vec_cst_ctor_to_array (op0, elts))
8363 return NULL_TREE;
8365 switch (code)
8367 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8368 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8369 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8370 default: gcc_unreachable ();
8373 for (i = 1; i < nelts; i++)
8375 elts[0] = const_binop (subcode, elts[0], elts[i]);
8376 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8377 return NULL_TREE;
8378 elts[i] = build_zero_cst (TREE_TYPE (type));
8381 return build_vector (type, elts);
8384 default:
8385 return NULL_TREE;
8386 } /* switch (code) */
8390 /* If the operation was a conversion do _not_ mark a resulting constant
8391 with TREE_OVERFLOW if the original constant was not. These conversions
8392 have implementation defined behavior and retaining the TREE_OVERFLOW
8393 flag here would confuse later passes such as VRP. */
8394 tree
8395 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8396 tree type, tree op0)
8398 tree res = fold_unary_loc (loc, code, type, op0);
8399 if (res
8400 && TREE_CODE (res) == INTEGER_CST
8401 && TREE_CODE (op0) == INTEGER_CST
8402 && CONVERT_EXPR_CODE_P (code))
8403 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8405 return res;
8408 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8409 operands OP0 and OP1. LOC is the location of the resulting expression.
8410 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8411 Return the folded expression if folding is successful. Otherwise,
8412 return NULL_TREE. */
8413 static tree
8414 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8415 tree arg0, tree arg1, tree op0, tree op1)
8417 tree tem;
8419 /* We only do these simplifications if we are optimizing. */
8420 if (!optimize)
8421 return NULL_TREE;
8423 /* Check for things like (A || B) && (A || C). We can convert this
8424 to A || (B && C). Note that either operator can be any of the four
8425 truth and/or operations and the transformation will still be
8426 valid. Also note that we only care about order for the
8427 ANDIF and ORIF operators. If B contains side effects, this
8428 might change the truth-value of A. */
8429 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8430 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8431 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8432 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8433 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8434 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8436 tree a00 = TREE_OPERAND (arg0, 0);
8437 tree a01 = TREE_OPERAND (arg0, 1);
8438 tree a10 = TREE_OPERAND (arg1, 0);
8439 tree a11 = TREE_OPERAND (arg1, 1);
8440 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8441 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8442 && (code == TRUTH_AND_EXPR
8443 || code == TRUTH_OR_EXPR));
8445 if (operand_equal_p (a00, a10, 0))
8446 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8447 fold_build2_loc (loc, code, type, a01, a11));
8448 else if (commutative && operand_equal_p (a00, a11, 0))
8449 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8450 fold_build2_loc (loc, code, type, a01, a10));
8451 else if (commutative && operand_equal_p (a01, a10, 0))
8452 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8453 fold_build2_loc (loc, code, type, a00, a11));
8455 /* This case is tricky because we must either have commutative
8456 operators or else A10 must not have side-effects. */
8458 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8459 && operand_equal_p (a01, a11, 0))
8460 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8461 fold_build2_loc (loc, code, type, a00, a10),
8462 a01);
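/* Example (illustrative) of the factoring above: (a || b) && (a || c)
   matches the first operand_equal_p case with a00 == a10 == a and is
   rebuilt as a || (b && c); the commutative variants catch the same
   pattern with the shared operand on either side.  */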
8465 /* See if we can build a range comparison. */
8466 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8467 return tem;
8469 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8470 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8472 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8473 if (tem)
8474 return fold_build2_loc (loc, code, type, tem, arg1);
8477 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8478 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8480 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8481 if (tem)
8482 return fold_build2_loc (loc, code, type, arg0, tem);
8485 /* Check for the possibility of merging component references. If our
8486 lhs is another similar operation, try to merge its rhs with our
8487 rhs. Then try to merge our lhs and rhs. */
8488 if (TREE_CODE (arg0) == code
8489 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8490 TREE_OPERAND (arg0, 1), arg1)))
8491 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8493 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8494 return tem;
8496 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8497 && (code == TRUTH_AND_EXPR
8498 || code == TRUTH_ANDIF_EXPR
8499 || code == TRUTH_OR_EXPR
8500 || code == TRUTH_ORIF_EXPR))
8502 enum tree_code ncode, icode;
8504 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8505 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8506 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8508 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8509 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8510 We don't want to pack more than two leaves into a non-IF AND/OR
8511 expression.
8512 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8513 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8514 If the inner right-hand side of the left-hand operand has
8515 side-effects, or isn't simple, then we can't add to it,
8516 as otherwise we might destroy the if-sequence. */
8517 if (TREE_CODE (arg0) == icode
8518 && simple_operand_p_2 (arg1)
8519 /* Needed for sequence points to handle trappings, and
8520 side-effects. */
8521 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8523 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8524 arg1);
8525 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8526 tem);
8528 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8529 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8530 else if (TREE_CODE (arg1) == icode
8531 && simple_operand_p_2 (arg0)
8532 /* Needed for sequence points to handle trappings, and
8533 side-effects. */
8534 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8536 tem = fold_build2_loc (loc, ncode, type,
8537 arg0, TREE_OPERAND (arg1, 0));
8538 return fold_build2_loc (loc, icode, type, tem,
8539 TREE_OPERAND (arg1, 1));
8541 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8542 into (A OR B).
8543 For sequence point consistency, we need to check for trapping,
8544 and side-effects. */
8545 else if (code == icode && simple_operand_p_2 (arg0)
8546 && simple_operand_p_2 (arg1))
8547 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8550 return NULL_TREE;
8553 /* Fold a binary expression of code CODE and type TYPE with operands
8554 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8555 Return the folded expression if folding is successful. Otherwise,
8556 return NULL_TREE. */
8558 static tree
8559 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8561 enum tree_code compl_code;
8563 if (code == MIN_EXPR)
8564 compl_code = MAX_EXPR;
8565 else if (code == MAX_EXPR)
8566 compl_code = MIN_EXPR;
8567 else
8568 gcc_unreachable ();
8570 /* MIN (MAX (a, b), b) == b. */
8571 if (TREE_CODE (op0) == compl_code
8572 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8573 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8575 /* MIN (MAX (b, a), b) == b. */
8576 if (TREE_CODE (op0) == compl_code
8577 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8578 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8579 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8581 /* MIN (a, MAX (a, b)) == a. */
8582 if (TREE_CODE (op1) == compl_code
8583 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8584 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8585 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8587 /* MIN (a, MAX (b, a)) == a. */
8588 if (TREE_CODE (op1) == compl_code
8589 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8590 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8591 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8593 return NULL_TREE;
8596 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8597 by changing CODE to reduce the magnitude of constants involved in
8598 ARG0 of the comparison.
8599 Returns a canonicalized comparison tree if a simplification was
8600 possible, otherwise returns NULL_TREE.
8601 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8602 valid if signed overflow is undefined. */
8604 static tree
8605 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8606 tree arg0, tree arg1,
8607 bool *strict_overflow_p)
8609 enum tree_code code0 = TREE_CODE (arg0);
8610 tree t, cst0 = NULL_TREE;
8611 int sgn0;
8612 bool swap = false;
8614 /* Match A +- CST code arg1 and CST code arg1. We can change the
8615 first form only if overflow is undefined. */
8616 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8617 /* In principle pointers also have undefined overflow behavior,
8618 but that causes problems elsewhere. */
8619 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8620 && (code0 == MINUS_EXPR
8621 || code0 == PLUS_EXPR)
8622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8623 || code0 == INTEGER_CST))
8624 return NULL_TREE;
8626 /* Identify the constant in arg0 and its sign. */
8627 if (code0 == INTEGER_CST)
8628 cst0 = arg0;
8629 else
8630 cst0 = TREE_OPERAND (arg0, 1);
8631 sgn0 = tree_int_cst_sgn (cst0);
8633 /* Overflowed constants and zero will cause problems. */
8634 if (integer_zerop (cst0)
8635 || TREE_OVERFLOW (cst0))
8636 return NULL_TREE;
8638 /* See if we can reduce the magnitude of the constant in
8639 arg0 by changing the comparison code. */
8640 if (code0 == INTEGER_CST)
8642 /* CST <= arg1 -> CST-1 < arg1. */
8643 if (code == LE_EXPR && sgn0 == 1)
8644 code = LT_EXPR;
8645 /* -CST < arg1 -> -CST-1 <= arg1. */
8646 else if (code == LT_EXPR && sgn0 == -1)
8647 code = LE_EXPR;
8648 /* CST > arg1 -> CST-1 >= arg1. */
8649 else if (code == GT_EXPR && sgn0 == 1)
8650 code = GE_EXPR;
8651 /* -CST >= arg1 -> -CST-1 > arg1. */
8652 else if (code == GE_EXPR && sgn0 == -1)
8653 code = GT_EXPR;
8654 else
8655 return NULL_TREE;
8656 /* arg1 code' CST' might be more canonical. */
8657 swap = true;
8659 else
8661 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8662 if (code == LT_EXPR
8663 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8664 code = LE_EXPR;
8665 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8666 else if (code == GT_EXPR
8667 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8668 code = GE_EXPR;
8669 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8670 else if (code == LE_EXPR
8671 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8672 code = LT_EXPR;
8673 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8674 else if (code == GE_EXPR
8675 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8676 code = GT_EXPR;
8677 else
8678 return NULL_TREE;
8679 *strict_overflow_p = true;
8682 /* Now build the constant reduced in magnitude. But not if that
8683 would produce one outside of its type's range. */
8684 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8685 && ((sgn0 == 1
8686 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8687 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8688 || (sgn0 == -1
8689 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8690 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8691 /* We cannot swap the comparison here as that would cause us to
8692 endlessly recurse. */
8693 return NULL_TREE;
8695 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8696 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8697 if (code0 != INTEGER_CST)
8698 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8699 t = fold_convert (TREE_TYPE (arg1), t);
8701 /* If swapping might yield a more canonical form, do so. */
8702 if (swap)
8703 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8704 else
8705 return fold_build2_loc (loc, code, type, t, arg1);
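/* Worked example (illustrative): with signed overflow treated as
   undefined, X + 4 > Y is rewritten by the rules above to X + 3 >= Y,
   shrinking the constant by one.  For the constant-only form, 5 <= Y
   first becomes 4 < Y and is then swapped into the more canonical
   Y > 4.  */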
8708 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8709 overflow further. Try to decrease the magnitude of constants involved
8710 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8711 and put sole constants at the second argument position.
8712 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8714 static tree
8715 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8716 tree arg0, tree arg1)
8718 tree t;
8719 bool strict_overflow_p;
8720 const char * const warnmsg = G_("assuming signed overflow does not occur "
8721 "when reducing constant in comparison");
8723 /* Try canonicalization by simplifying arg0. */
8724 strict_overflow_p = false;
8725 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8726 &strict_overflow_p);
8727 if (t)
8729 if (strict_overflow_p)
8730 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8731 return t;
8734 /* Try canonicalization by simplifying arg1 using the swapped
8735 comparison. */
8736 code = swap_tree_comparison (code);
8737 strict_overflow_p = false;
8738 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8739 &strict_overflow_p);
8740 if (t && strict_overflow_p)
8741 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8742 return t;
8745 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8746 space. This is used to avoid issuing overflow warnings for
8747 expressions like &p->x which cannot wrap. */
8749 static bool
8750 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8752 double_int di_offset, total;
8754 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8755 return true;
8757 if (bitpos < 0)
8758 return true;
8760 if (offset == NULL_TREE)
8761 di_offset = double_int_zero;
8762 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8763 return true;
8764 else
8765 di_offset = TREE_INT_CST (offset);
8767 bool overflow;
8768 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8769 total = di_offset.add_with_sign (units, true, &overflow);
8770 if (overflow)
8771 return true;
8773 if (total.high != 0)
8774 return true;
8776 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8777 if (size <= 0)
8778 return true;
8780 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8781 array. */
8782 if (TREE_CODE (base) == ADDR_EXPR)
8784 HOST_WIDE_INT base_size;
8786 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8787 if (base_size > 0 && size < base_size)
8788 size = base_size;
8791 return total.low > (unsigned HOST_WIDE_INT) size;
8794 /* Subroutine of fold_binary. This routine performs all of the
8795 transformations that are common to the equality/inequality
8796 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8797 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8798 fold_binary should call fold_binary. Fold a comparison with
8799 tree code CODE and type TYPE with operands OP0 and OP1. Return
8800 the folded comparison or NULL_TREE. */
8802 static tree
8803 fold_comparison (location_t loc, enum tree_code code, tree type,
8804 tree op0, tree op1)
8806 tree arg0, arg1, tem;
8808 arg0 = op0;
8809 arg1 = op1;
8811 STRIP_SIGN_NOPS (arg0);
8812 STRIP_SIGN_NOPS (arg1);
8814 tem = fold_relational_const (code, type, arg0, arg1);
8815 if (tem != NULL_TREE)
8816 return tem;
8818 /* If one arg is a real or integer constant, put it last. */
8819 if (tree_swap_operands_p (arg0, arg1, true))
8820 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8822 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8823 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8824 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8825 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8826 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8827 && (TREE_CODE (arg1) == INTEGER_CST
8828 && !TREE_OVERFLOW (arg1)))
8830 tree const1 = TREE_OPERAND (arg0, 1);
8831 tree const2 = arg1;
8832 tree variable = TREE_OPERAND (arg0, 0);
8833 tree lhs;
8834 int lhs_add;
8835 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8837 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8838 TREE_TYPE (arg1), const2, const1);
8840 /* If the constant operation overflowed this can be
8841 simplified as a comparison against INT_MAX/INT_MIN. */
8842 if (TREE_CODE (lhs) == INTEGER_CST
8843 && TREE_OVERFLOW (lhs))
8845 int const1_sgn = tree_int_cst_sgn (const1);
8846 enum tree_code code2 = code;
8848 /* Get the sign of the constant on the lhs if the
8849 operation were VARIABLE + CONST1. */
8850 if (TREE_CODE (arg0) == MINUS_EXPR)
8851 const1_sgn = -const1_sgn;
8853 /* The sign of the constant determines if we overflowed
8854 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8855 Canonicalize to the INT_MIN overflow by swapping the comparison
8856 if necessary. */
8857 if (const1_sgn == -1)
8858 code2 = swap_tree_comparison (code);
8860 /* We now can look at the canonicalized case
8861 VARIABLE + 1 CODE2 INT_MIN
8862 and decide on the result. */
8863 if (code2 == LT_EXPR
8864 || code2 == LE_EXPR
8865 || code2 == EQ_EXPR)
8866 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8867 else if (code2 == NE_EXPR
8868 || code2 == GE_EXPR
8869 || code2 == GT_EXPR)
8870 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8873 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8874 && (TREE_CODE (lhs) != INTEGER_CST
8875 || !TREE_OVERFLOW (lhs)))
8877 if (code != EQ_EXPR && code != NE_EXPR)
8878 fold_overflow_warning ("assuming signed overflow does not occur "
8879 "when changing X +- C1 cmp C2 to "
8880 "X cmp C1 +- C2",
8881 WARN_STRICT_OVERFLOW_COMPARISON);
8882 return fold_build2_loc (loc, code, type, variable, lhs);
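/* Worked example (illustrative): for signed int X with undefined
   overflow, X + 10 < 20 reaches this point with CONST1 == 10 and
   CONST2 == 20; LHS folds to 10 and the comparison is rewritten as
   X < 10, after the strict-overflow warning since LT_EXPR is not an
   equality test.  */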
8886 /* For comparisons of pointers we can decompose it to a compile time
8887 comparison of the base objects and the offsets into the object.
8888 This requires at least one operand being an ADDR_EXPR or a
8889 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8890 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8891 && (TREE_CODE (arg0) == ADDR_EXPR
8892 || TREE_CODE (arg1) == ADDR_EXPR
8893 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8894 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8896 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8897 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8898 enum machine_mode mode;
8899 int volatilep, unsignedp;
8900 bool indirect_base0 = false, indirect_base1 = false;
8902 /* Get base and offset for the access. Strip ADDR_EXPR for
8903 get_inner_reference, but put it back by stripping INDIRECT_REF
8904 off the base object if possible. indirect_baseN will be true
8905 if baseN is not an address but refers to the object itself. */
8906 base0 = arg0;
8907 if (TREE_CODE (arg0) == ADDR_EXPR)
8909 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8910 &bitsize, &bitpos0, &offset0, &mode,
8911 &unsignedp, &volatilep, false);
8912 if (TREE_CODE (base0) == INDIRECT_REF)
8913 base0 = TREE_OPERAND (base0, 0);
8914 else
8915 indirect_base0 = true;
8917 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8919 base0 = TREE_OPERAND (arg0, 0);
8920 STRIP_SIGN_NOPS (base0);
8921 if (TREE_CODE (base0) == ADDR_EXPR)
8923 base0 = TREE_OPERAND (base0, 0);
8924 indirect_base0 = true;
8926 offset0 = TREE_OPERAND (arg0, 1);
8927 if (host_integerp (offset0, 0))
8929 HOST_WIDE_INT off = size_low_cst (offset0);
8930 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8931 * BITS_PER_UNIT)
8932 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8934 bitpos0 = off * BITS_PER_UNIT;
8935 offset0 = NULL_TREE;
8940 base1 = arg1;
8941 if (TREE_CODE (arg1) == ADDR_EXPR)
8943 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8944 &bitsize, &bitpos1, &offset1, &mode,
8945 &unsignedp, &volatilep, false);
8946 if (TREE_CODE (base1) == INDIRECT_REF)
8947 base1 = TREE_OPERAND (base1, 0);
8948 else
8949 indirect_base1 = true;
8951 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8953 base1 = TREE_OPERAND (arg1, 0);
8954 STRIP_SIGN_NOPS (base1);
8955 if (TREE_CODE (base1) == ADDR_EXPR)
8957 base1 = TREE_OPERAND (base1, 0);
8958 indirect_base1 = true;
8960 offset1 = TREE_OPERAND (arg1, 1);
8961 if (host_integerp (offset1, 0))
8963 HOST_WIDE_INT off = size_low_cst (offset1);
8964 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8965 * BITS_PER_UNIT)
8966 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8968 bitpos1 = off * BITS_PER_UNIT;
8969 offset1 = NULL_TREE;
8974 /* A local variable can never be pointed to by
8975 the default SSA name of an incoming parameter. */
8976 if ((TREE_CODE (arg0) == ADDR_EXPR
8977 && indirect_base0
8978 && TREE_CODE (base0) == VAR_DECL
8979 && auto_var_in_fn_p (base0, current_function_decl)
8980 && !indirect_base1
8981 && TREE_CODE (base1) == SSA_NAME
8982 && SSA_NAME_IS_DEFAULT_DEF (base1)
8983 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8984 || (TREE_CODE (arg1) == ADDR_EXPR
8985 && indirect_base1
8986 && TREE_CODE (base1) == VAR_DECL
8987 && auto_var_in_fn_p (base1, current_function_decl)
8988 && !indirect_base0
8989 && TREE_CODE (base0) == SSA_NAME
8990 && SSA_NAME_IS_DEFAULT_DEF (base0)
8991 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8993 if (code == NE_EXPR)
8994 return constant_boolean_node (1, type);
8995 else if (code == EQ_EXPR)
8996 return constant_boolean_node (0, type);
8998 /* If we have equivalent bases we might be able to simplify. */
8999 else if (indirect_base0 == indirect_base1
9000 && operand_equal_p (base0, base1, 0))
9002 /* We can fold this expression to a constant if the non-constant
9003 offset parts are equal. */
9004 if ((offset0 == offset1
9005 || (offset0 && offset1
9006 && operand_equal_p (offset0, offset1, 0)))
9007 && (code == EQ_EXPR
9008 || code == NE_EXPR
9009 || (indirect_base0 && DECL_P (base0))
9010 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9013 if (code != EQ_EXPR
9014 && code != NE_EXPR
9015 && bitpos0 != bitpos1
9016 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9017 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9018 fold_overflow_warning (("assuming pointer wraparound does not "
9019 "occur when comparing P +- C1 with "
9020 "P +- C2"),
9021 WARN_STRICT_OVERFLOW_CONDITIONAL);
9023 switch (code)
9025 case EQ_EXPR:
9026 return constant_boolean_node (bitpos0 == bitpos1, type);
9027 case NE_EXPR:
9028 return constant_boolean_node (bitpos0 != bitpos1, type);
9029 case LT_EXPR:
9030 return constant_boolean_node (bitpos0 < bitpos1, type);
9031 case LE_EXPR:
9032 return constant_boolean_node (bitpos0 <= bitpos1, type);
9033 case GE_EXPR:
9034 return constant_boolean_node (bitpos0 >= bitpos1, type);
9035 case GT_EXPR:
9036 return constant_boolean_node (bitpos0 > bitpos1, type);
9037 default:;
9040 /* We can simplify the comparison to a comparison of the variable
9041 offset parts if the constant offset parts are equal.
9042 Be careful to use signed sizetype here because otherwise we
9043 mess with array offsets in the wrong way. This is possible
9044 because pointer arithmetic is restricted to remain within an
9045 object and overflow on pointer differences is undefined as of
9046 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9047 else if (bitpos0 == bitpos1
9048 && ((code == EQ_EXPR || code == NE_EXPR)
9049 || (indirect_base0 && DECL_P (base0))
9050 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9052 /* By converting to signed sizetype we cover middle-end pointer
9053 arithmetic which operates on unsigned pointer types of size
9054 type size and ARRAY_REF offsets which are properly sign or
9055 zero extended from their type in case it is narrower than
9056 sizetype. */
9057 if (offset0 == NULL_TREE)
9058 offset0 = build_int_cst (ssizetype, 0);
9059 else
9060 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9061 if (offset1 == NULL_TREE)
9062 offset1 = build_int_cst (ssizetype, 0);
9063 else
9064 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9066 if (code != EQ_EXPR
9067 && code != NE_EXPR
9068 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9069 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9070 fold_overflow_warning (("assuming pointer wraparound does not "
9071 "occur when comparing P +- C1 with "
9072 "P +- C2"),
9073 WARN_STRICT_OVERFLOW_COMPARISON);
9075 return fold_build2_loc (loc, code, type, offset0, offset1);
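/* Editorial example (illustrative): comparing "p + i" with "p + j"
   (same base p, equal constant parts) folds to a comparison of the
   offsets, e.g. "(ssizetype) i < (ssizetype) j" for LT_EXPR; the
   ordered cases rely on pointer overflow being undefined.  */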
9078 /* For non-equal bases we can simplify if they are addresses
9079 of local binding decls or constants. */
9080 else if (indirect_base0 && indirect_base1
9081 /* We know that !operand_equal_p (base0, base1, 0)
9082 because the if condition was false. But make
9083 sure two decls are not the same. */
9084 && base0 != base1
9085 && TREE_CODE (arg0) == ADDR_EXPR
9086 && TREE_CODE (arg1) == ADDR_EXPR
9087 && (((TREE_CODE (base0) == VAR_DECL
9088 || TREE_CODE (base0) == PARM_DECL)
9089 && (targetm.binds_local_p (base0)
9090 || CONSTANT_CLASS_P (base1)))
9091 || CONSTANT_CLASS_P (base0))
9092 && (((TREE_CODE (base1) == VAR_DECL
9093 || TREE_CODE (base1) == PARM_DECL)
9094 && (targetm.binds_local_p (base1)
9095 || CONSTANT_CLASS_P (base0)))
9096 || CONSTANT_CLASS_P (base1)))
9098 if (code == EQ_EXPR)
9099 return omit_two_operands_loc (loc, type, boolean_false_node,
9100 arg0, arg1);
9101 else if (code == NE_EXPR)
9102 return omit_two_operands_loc (loc, type, boolean_true_node,
9103 arg0, arg1);
9105 /* For equal offsets we can simplify to a comparison of the
9106 base addresses. */
9107 else if (bitpos0 == bitpos1
9108 && (indirect_base0
9109 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9110 && (indirect_base1
9111 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9112 && ((offset0 == offset1)
9113 || (offset0 && offset1
9114 && operand_equal_p (offset0, offset1, 0))))
9116 if (indirect_base0)
9117 base0 = build_fold_addr_expr_loc (loc, base0);
9118 if (indirect_base1)
9119 base1 = build_fold_addr_expr_loc (loc, base1);
9120 return fold_build2_loc (loc, code, type, base0, base1);
9124 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9125 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9126 the resulting offset is smaller in absolute value than the
9127 original one. */
9128 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9129 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9130 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9131 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9132 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9133 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9134 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9136 tree const1 = TREE_OPERAND (arg0, 1);
9137 tree const2 = TREE_OPERAND (arg1, 1);
9138 tree variable1 = TREE_OPERAND (arg0, 0);
9139 tree variable2 = TREE_OPERAND (arg1, 0);
9140 tree cst;
9141 const char * const warnmsg = G_("assuming signed overflow does not "
9142 "occur when combining constants around "
9143 "a comparison");
9145 /* Put the constant on the side where it doesn't overflow and is
9146 of lower absolute value than before. */
9147 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9148 ? MINUS_EXPR : PLUS_EXPR,
9149 const2, const1);
9150 if (!TREE_OVERFLOW (cst)
9151 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9153 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9154 return fold_build2_loc (loc, code, type,
9155 variable1,
9156 fold_build2_loc (loc,
9157 TREE_CODE (arg1), TREE_TYPE (arg1),
9158 variable2, cst));
9161 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9162 ? MINUS_EXPR : PLUS_EXPR,
9163 const1, const2);
9164 if (!TREE_OVERFLOW (cst)
9165 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9167 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9168 return fold_build2_loc (loc, code, type,
9169 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9170 variable1, cst),
9171 variable2);
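/* Editorial example (illustrative): for "x + 10 < y + 12" the first
   attempt computes cst = 12 - 10 = 2, which neither overflows nor
   grows in absolute value, so the comparison becomes "x < y + 2".  */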
9175 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9176 signed arithmetic case. That form is created by the compiler
9177 often enough for folding it to be of value. One example is in
9178 computing loop trip counts after Operator Strength Reduction. */
9179 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9180 && TREE_CODE (arg0) == MULT_EXPR
9181 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9182 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9183 && integer_zerop (arg1))
9185 tree const1 = TREE_OPERAND (arg0, 1);
9186 tree const2 = arg1; /* zero */
9187 tree variable1 = TREE_OPERAND (arg0, 0);
9188 enum tree_code cmp_code = code;
9190 /* Handle unfolded multiplication by zero. */
9191 if (integer_zerop (const1))
9192 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9194 fold_overflow_warning (("assuming signed overflow does not occur when "
9195 "eliminating multiplication in comparison "
9196 "with zero"),
9197 WARN_STRICT_OVERFLOW_COMPARISON);
9199 /* If const1 is negative we swap the sense of the comparison. */
9200 if (tree_int_cst_sgn (const1) < 0)
9201 cmp_code = swap_tree_comparison (cmp_code);
9203 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
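/* Editorial example (illustrative): "x * 4 > 0" becomes "x > 0", and
   "x * -4 > 0" becomes "x < 0" because the negative multiplier swaps
   the sense of the comparison; both rely on signed overflow being
   undefined, hence the warning above.  */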
9206 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9207 if (tem)
9208 return tem;
9210 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9212 tree targ0 = strip_float_extensions (arg0);
9213 tree targ1 = strip_float_extensions (arg1);
9214 tree newtype = TREE_TYPE (targ0);
9216 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9217 newtype = TREE_TYPE (targ1);
9219 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9220 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9221 return fold_build2_loc (loc, code, type,
9222 fold_convert_loc (loc, newtype, targ0),
9223 fold_convert_loc (loc, newtype, targ1));
9225 /* (-a) CMP (-b) -> b CMP a */
9226 if (TREE_CODE (arg0) == NEGATE_EXPR
9227 && TREE_CODE (arg1) == NEGATE_EXPR)
9228 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9229 TREE_OPERAND (arg0, 0));
9231 if (TREE_CODE (arg1) == REAL_CST)
9233 REAL_VALUE_TYPE cst;
9234 cst = TREE_REAL_CST (arg1);
9236 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9237 if (TREE_CODE (arg0) == NEGATE_EXPR)
9238 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9239 TREE_OPERAND (arg0, 0),
9240 build_real (TREE_TYPE (arg1),
9241 real_value_negate (&cst)));
9243 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9244 /* a CMP (-0) -> a CMP 0 */
9245 if (REAL_VALUE_MINUS_ZERO (cst))
9246 return fold_build2_loc (loc, code, type, arg0,
9247 build_real (TREE_TYPE (arg1), dconst0));
9249 /* x != NaN is always true, other ops are always false. */
9250 if (REAL_VALUE_ISNAN (cst)
9251 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9253 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9254 return omit_one_operand_loc (loc, type, tem, arg0);
9257 /* Fold comparisons against infinity. */
9258 if (REAL_VALUE_ISINF (cst)
9259 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9261 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9262 if (tem != NULL_TREE)
9263 return tem;
9267 /* If this is a comparison of a real constant with a PLUS_EXPR
9268 or a MINUS_EXPR of a real constant, we can convert it into a
9269 comparison with a revised real constant as long as no overflow
9270 occurs when unsafe_math_optimizations are enabled. */
9271 if (flag_unsafe_math_optimizations
9272 && TREE_CODE (arg1) == REAL_CST
9273 && (TREE_CODE (arg0) == PLUS_EXPR
9274 || TREE_CODE (arg0) == MINUS_EXPR)
9275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9276 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9277 ? MINUS_EXPR : PLUS_EXPR,
9278 arg1, TREE_OPERAND (arg0, 1)))
9279 && !TREE_OVERFLOW (tem))
9280 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9282 /* Likewise, we can simplify a comparison of a real constant with
9283 a MINUS_EXPR whose first operand is also a real constant, i.e.
9284 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9285 floating-point types only if -fassociative-math is set. */
9286 if (flag_associative_math
9287 && TREE_CODE (arg1) == REAL_CST
9288 && TREE_CODE (arg0) == MINUS_EXPR
9289 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9290 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9291 arg1))
9292 && !TREE_OVERFLOW (tem))
9293 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9294 TREE_OPERAND (arg0, 1), tem);
9296 /* Fold comparisons against built-in math functions. */
9297 if (TREE_CODE (arg1) == REAL_CST
9298 && flag_unsafe_math_optimizations
9299 && ! flag_errno_math)
9301 enum built_in_function fcode = builtin_mathfn_code (arg0);
9303 if (fcode != END_BUILTINS)
9305 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9306 if (tem != NULL_TREE)
9307 return tem;
9312 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9313 && CONVERT_EXPR_P (arg0))
9315 /* If we are widening one operand of an integer comparison,
9316 see if the other operand is similarly being widened. Perhaps we
9317 can do the comparison in the narrower type. */
9318 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9319 if (tem)
9320 return tem;
9322 /* Or if we are changing signedness. */
9323 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9324 if (tem)
9325 return tem;
9328 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9329 constant, we can simplify it. */
9330 if (TREE_CODE (arg1) == INTEGER_CST
9331 && (TREE_CODE (arg0) == MIN_EXPR
9332 || TREE_CODE (arg0) == MAX_EXPR)
9333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9335 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9336 if (tem)
9337 return tem;
9340 /* Simplify comparison of something with itself. (For IEEE
9341 floating-point, we can only do some of these simplifications.) */
9342 if (operand_equal_p (arg0, arg1, 0))
9344 switch (code)
9346 case EQ_EXPR:
9347 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9348 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9349 return constant_boolean_node (1, type);
9350 break;
9352 case GE_EXPR:
9353 case LE_EXPR:
9354 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9355 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9356 return constant_boolean_node (1, type);
9357 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9359 case NE_EXPR:
9360 /* For NE, we can only do this simplification if the type is
9361 integer or we don't honor IEEE floating-point NaNs. */
9362 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9363 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9364 break;
9365 /* ... fall through ... */
9366 case GT_EXPR:
9367 case LT_EXPR:
9368 return constant_boolean_node (0, type);
9369 default:
9370 gcc_unreachable ();
9374 /* If we are comparing an expression that just has comparisons
9375 of two integer values, arithmetic expressions of those comparisons,
9376 and constants, we can simplify it. There are only three cases
9377 to check: the two values can either be equal, the first can be
9378 greater, or the second can be greater. Fold the expression for
9379 those three values. Since each value must be 0 or 1, we have
9380 eight possibilities, each of which corresponds to the constant 0
9381 or 1 or one of the six possible comparisons.
9383 This handles common cases like (a > b) == 0 but also handles
9384 expressions like ((x > y) - (y > x)) > 0, which supposedly
9385 occur in macroized code. */
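/* Editorial example (illustrative): for "(a > b) == 0", substituting
   (max,min), (max,max) and (min,max) for (a,b) yields the results
   0, 1 and 1, i.e. mask 3 below, so the expression folds to
   "a <= b".  */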
9387 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9389 tree cval1 = 0, cval2 = 0;
9390 int save_p = 0;
9392 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9393 /* Don't handle degenerate cases here; they should already
9394 have been handled anyway. */
9395 && cval1 != 0 && cval2 != 0
9396 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9397 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9398 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9399 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9400 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9401 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9402 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9404 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9405 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9407 /* We can't just pass T to eval_subst in case cval1 or cval2
9408 was the same as ARG1. */
9410 tree high_result
9411 = fold_build2_loc (loc, code, type,
9412 eval_subst (loc, arg0, cval1, maxval,
9413 cval2, minval),
9414 arg1);
9415 tree equal_result
9416 = fold_build2_loc (loc, code, type,
9417 eval_subst (loc, arg0, cval1, maxval,
9418 cval2, maxval),
9419 arg1);
9420 tree low_result
9421 = fold_build2_loc (loc, code, type,
9422 eval_subst (loc, arg0, cval1, minval,
9423 cval2, maxval),
9424 arg1);
9426 /* All three of these results should be 0 or 1. Confirm they are.
9427 Then use those values to select the proper code to use. */
9429 if (TREE_CODE (high_result) == INTEGER_CST
9430 && TREE_CODE (equal_result) == INTEGER_CST
9431 && TREE_CODE (low_result) == INTEGER_CST)
9433 /* Make a 3-bit mask with the high-order bit being the
9434 value for `>', the next for `=', and the low for `<'. */
9435 switch ((integer_onep (high_result) * 4)
9436 + (integer_onep (equal_result) * 2)
9437 + integer_onep (low_result))
9439 case 0:
9440 /* Always false. */
9441 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9442 case 1:
9443 code = LT_EXPR;
9444 break;
9445 case 2:
9446 code = EQ_EXPR;
9447 break;
9448 case 3:
9449 code = LE_EXPR;
9450 break;
9451 case 4:
9452 code = GT_EXPR;
9453 break;
9454 case 5:
9455 code = NE_EXPR;
9456 break;
9457 case 6:
9458 code = GE_EXPR;
9459 break;
9460 case 7:
9461 /* Always true. */
9462 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9465 if (save_p)
9467 tem = save_expr (build2 (code, type, cval1, cval2));
9468 SET_EXPR_LOCATION (tem, loc);
9469 return tem;
9471 return fold_build2_loc (loc, code, type, cval1, cval2);
9476 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9477 into a single range test. */
9478 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9479 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9480 && TREE_CODE (arg1) == INTEGER_CST
9481 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9482 && !integer_zerop (TREE_OPERAND (arg0, 1))
9483 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9484 && !TREE_OVERFLOW (arg1))
9486 tem = fold_div_compare (loc, code, type, arg0, arg1);
9487 if (tem != NULL_TREE)
9488 return tem;
9491 /* Fold ~X op ~Y as Y op X. */
9492 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9493 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9495 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9496 return fold_build2_loc (loc, code, type,
9497 fold_convert_loc (loc, cmp_type,
9498 TREE_OPERAND (arg1, 0)),
9499 TREE_OPERAND (arg0, 0));
9502 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9503 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9504 && TREE_CODE (arg1) == INTEGER_CST)
9506 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9507 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9508 TREE_OPERAND (arg0, 0),
9509 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9510 fold_convert_loc (loc, cmp_type, arg1)));
9513 return NULL_TREE;
9517 /* Subroutine of fold_binary. Optimize complex multiplications of the
9518 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9519 argument EXPR represents the expression "z" of type TYPE. */
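/* Editorial note (not part of the original source): for z = a + b*i
   we have conj(z) = a - b*i, so z * conj(z) = a*a + b*b with a zero
   imaginary part, which is exactly the COMPLEX_EXPR built below.  */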
9521 static tree
9522 fold_mult_zconjz (location_t loc, tree type, tree expr)
9524 tree itype = TREE_TYPE (type);
9525 tree rpart, ipart, tem;
9527 if (TREE_CODE (expr) == COMPLEX_EXPR)
9529 rpart = TREE_OPERAND (expr, 0);
9530 ipart = TREE_OPERAND (expr, 1);
9532 else if (TREE_CODE (expr) == COMPLEX_CST)
9534 rpart = TREE_REALPART (expr);
9535 ipart = TREE_IMAGPART (expr);
9537 else
9539 expr = save_expr (expr);
9540 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9541 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9544 rpart = save_expr (rpart);
9545 ipart = save_expr (ipart);
9546 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9547 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9548 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9549 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9550 build_zero_cst (itype));
9554 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9555 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9556 guarantees that P and N have the same least significant log2(M) bits.
9557 N is not otherwise constrained. In particular, N is not normalized to
9558 0 <= N < M as is common. In general, the precise value of P is unknown.
9559 M is chosen as large as possible such that constant N can be determined.
9561 Returns M and sets *RESIDUE to N.
9563 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9564 account. This is not always possible due to PR 35705.
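/* Editorial example (illustrative): if EXPR is "&buf p+ 6" for an
   8-byte-aligned object buf, the ADDR_EXPR case yields modulus 8 and
   residue 0, and the constant offset then bumps *RESIDUE to 6: the
   pointer value is congruent to 6 mod 8.  */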
9567 static unsigned HOST_WIDE_INT
9568 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9569 bool allow_func_align)
9571 enum tree_code code;
9573 *residue = 0;
9575 code = TREE_CODE (expr);
9576 if (code == ADDR_EXPR)
9578 unsigned int bitalign;
9579 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9580 *residue /= BITS_PER_UNIT;
9581 return bitalign / BITS_PER_UNIT;
9583 else if (code == POINTER_PLUS_EXPR)
9585 tree op0, op1;
9586 unsigned HOST_WIDE_INT modulus;
9587 enum tree_code inner_code;
9589 op0 = TREE_OPERAND (expr, 0);
9590 STRIP_NOPS (op0);
9591 modulus = get_pointer_modulus_and_residue (op0, residue,
9592 allow_func_align);
9594 op1 = TREE_OPERAND (expr, 1);
9595 STRIP_NOPS (op1);
9596 inner_code = TREE_CODE (op1);
9597 if (inner_code == INTEGER_CST)
9599 *residue += TREE_INT_CST_LOW (op1);
9600 return modulus;
9602 else if (inner_code == MULT_EXPR)
9604 op1 = TREE_OPERAND (op1, 1);
9605 if (TREE_CODE (op1) == INTEGER_CST)
9607 unsigned HOST_WIDE_INT align;
9609 /* Compute the greatest power-of-2 divisor of op1. */
9610 align = TREE_INT_CST_LOW (op1);
9611 align &= -align;
9613 /* If align is non-zero and less than *modulus, replace
9614 *modulus with align. If align is 0, then either op1 is 0
9615 or the greatest power-of-2 divisor of op1 doesn't fit in an
9616 unsigned HOST_WIDE_INT. In either case, no additional
9617 constraint is imposed. */
9618 if (align)
9619 modulus = MIN (modulus, align);
9621 return modulus;
9626 /* If we get here, we were unable to determine anything useful about the
9627 expression. */
9628 return 1;
9631 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9632 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9634 static bool
9635 vec_cst_ctor_to_array (tree arg, tree *elts)
9637 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9639 if (TREE_CODE (arg) == VECTOR_CST)
9641 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9642 elts[i] = VECTOR_CST_ELT (arg, i);
9644 else if (TREE_CODE (arg) == CONSTRUCTOR)
9646 constructor_elt *elt;
9648 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9649 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9650 return false;
9651 else
9652 elts[i] = elt->value;
9654 else
9655 return false;
9656 for (; i < nelts; i++)
9657 elts[i]
9658 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9659 return true;
9662 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9663 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9664 NULL_TREE otherwise. */
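/* Editorial example (illustrative): with four-element vectors, the
   selector {0, 4, 1, 5} interleaves the low halves of ARG0 and ARG1;
   indices 0..3 select from ARG0 and 4..7 from ARG1.  */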
9666 static tree
9667 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9669 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9670 tree *elts;
9671 bool need_ctor = false;
9673 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9674 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9675 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9676 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9677 return NULL_TREE;
9679 elts = XALLOCAVEC (tree, nelts * 3);
9680 if (!vec_cst_ctor_to_array (arg0, elts)
9681 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9682 return NULL_TREE;
9684 for (i = 0; i < nelts; i++)
9686 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9687 need_ctor = true;
9688 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9691 if (need_ctor)
9693 vec<constructor_elt, va_gc> *v;
9694 vec_alloc (v, nelts);
9695 for (i = 0; i < nelts; i++)
9696 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9697 return build_constructor (type, v);
9699 else
9700 return build_vector (type, &elts[2 * nelts]);
9703 /* Try to fold a pointer difference of type TYPE between two address expressions of
9704 array references AREF0 and AREF1 using location LOC. Return a
9705 simplified expression for the difference or NULL_TREE. */
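/* Editorial example (illustrative): for &a[i] - &a[j] over an array
   of 4-byte ints this produces 0 + (i - j) * 4; the base offset term
   comes from recursing into nested ARRAY_REF or INDIRECT_REF bases.  */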
9707 static tree
9708 fold_addr_of_array_ref_difference (location_t loc, tree type,
9709 tree aref0, tree aref1)
9711 tree base0 = TREE_OPERAND (aref0, 0);
9712 tree base1 = TREE_OPERAND (aref1, 0);
9713 tree base_offset = build_int_cst (type, 0);
9715 /* If the bases are array references as well, recurse. If the bases
9716 are pointer indirections compute the difference of the pointers.
9717 If the bases are equal, we are set. */
9718 if ((TREE_CODE (base0) == ARRAY_REF
9719 && TREE_CODE (base1) == ARRAY_REF
9720 && (base_offset
9721 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9722 || (INDIRECT_REF_P (base0)
9723 && INDIRECT_REF_P (base1)
9724 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9725 TREE_OPERAND (base0, 0),
9726 TREE_OPERAND (base1, 0))))
9727 || operand_equal_p (base0, base1, 0))
9729 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9730 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9731 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9732 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9733 return fold_build2_loc (loc, PLUS_EXPR, type,
9734 base_offset,
9735 fold_build2_loc (loc, MULT_EXPR, type,
9736 diff, esz));
9738 return NULL_TREE;
9741 /* If the real or vector real constant CST of type TYPE has an exact
9742 inverse, return it, else return NULL. */
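/* Editorial example (illustrative): 4.0 has the exact inverse 0.25,
   so a division by 4.0 can safely become a multiplication; 3.0 has
   none, since 1/3 is not representable in binary floating point, and
   NULL_TREE is returned.  */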
9744 static tree
9745 exact_inverse (tree type, tree cst)
9747 REAL_VALUE_TYPE r;
9748 tree unit_type, *elts;
9749 enum machine_mode mode;
9750 unsigned vec_nelts, i;
9752 switch (TREE_CODE (cst))
9754 case REAL_CST:
9755 r = TREE_REAL_CST (cst);
9757 if (exact_real_inverse (TYPE_MODE (type), &r))
9758 return build_real (type, r);
9760 return NULL_TREE;
9762 case VECTOR_CST:
9763 vec_nelts = VECTOR_CST_NELTS (cst);
9764 elts = XALLOCAVEC (tree, vec_nelts);
9765 unit_type = TREE_TYPE (type);
9766 mode = TYPE_MODE (unit_type);
9768 for (i = 0; i < vec_nelts; i++)
9770 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9771 if (!exact_real_inverse (mode, &r))
9772 return NULL_TREE;
9773 elts[i] = build_real (unit_type, r);
9776 return build_vector (type, elts);
9778 default:
9779 return NULL_TREE;
9783 /* Fold a binary expression of code CODE and type TYPE with operands
9784 OP0 and OP1. LOC is the location of the resulting expression.
9785 Return the folded expression if folding is successful. Otherwise,
9786 return NULL_TREE. */
9788 tree
9789 fold_binary_loc (location_t loc,
9790 enum tree_code code, tree type, tree op0, tree op1)
9792 enum tree_code_class kind = TREE_CODE_CLASS (code);
9793 tree arg0, arg1, tem;
9794 tree t1 = NULL_TREE;
9795 bool strict_overflow_p;
9797 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9798 && TREE_CODE_LENGTH (code) == 2
9799 && op0 != NULL_TREE
9800 && op1 != NULL_TREE);
9802 arg0 = op0;
9803 arg1 = op1;
9805 /* Strip any conversions that don't change the mode. This is
9806 safe for every expression, except for a comparison expression
9807 because its signedness is derived from its operands. So, in
9808 the latter case, only strip conversions that don't change the
9809 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9810 preserved.
9812 Note that this is done as an internal manipulation within the
9813 constant folder, in order to find the simplest representation
9814 of the arguments so that their form can be studied. In any
9815 case, the appropriate type conversions should be put back in
9816 the tree that will get out of the constant folder. */
9818 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9820 STRIP_SIGN_NOPS (arg0);
9821 STRIP_SIGN_NOPS (arg1);
9823 else
9825 STRIP_NOPS (arg0);
9826 STRIP_NOPS (arg1);
9829 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9830 constant but we can't do arithmetic on them. */
9831 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9832 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9833 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9834 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9835 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9836 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9838 if (kind == tcc_binary)
9840 /* Make sure type and arg0 have the same saturating flag. */
9841 gcc_assert (TYPE_SATURATING (type)
9842 == TYPE_SATURATING (TREE_TYPE (arg0)));
9843 tem = const_binop (code, arg0, arg1);
9845 else if (kind == tcc_comparison)
9846 tem = fold_relational_const (code, type, arg0, arg1);
9847 else
9848 tem = NULL_TREE;
9850 if (tem != NULL_TREE)
9852 if (TREE_TYPE (tem) != type)
9853 tem = fold_convert_loc (loc, type, tem);
9854 return tem;
9858 /* If this is a commutative operation, and ARG0 is a constant, move it
9859 to ARG1 to reduce the number of tests below. */
9860 if (commutative_tree_code (code)
9861 && tree_swap_operands_p (arg0, arg1, true))
9862 return fold_build2_loc (loc, code, type, op1, op0);
9864 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9866 First check for cases where an arithmetic operation is applied to a
9867 compound, conditional, or comparison operation. Push the arithmetic
9868 operation inside the compound or conditional to see if any folding
9869 can then be done. Convert comparison to conditional for this purpose.
9870 This also optimizes non-constant cases that used to be done in
9871 expand_expr.
9873 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9874 where one of the operands is a comparison and the other is a comparison, a
9875 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9876 code below would make the expression more complex. Change it to a
9877 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9878 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9880 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9881 || code == EQ_EXPR || code == NE_EXPR)
9882 && TREE_CODE (type) != VECTOR_TYPE
9883 && ((truth_value_p (TREE_CODE (arg0))
9884 && (truth_value_p (TREE_CODE (arg1))
9885 || (TREE_CODE (arg1) == BIT_AND_EXPR
9886 && integer_onep (TREE_OPERAND (arg1, 1)))))
9887 || (truth_value_p (TREE_CODE (arg1))
9888 && (truth_value_p (TREE_CODE (arg0))
9889 || (TREE_CODE (arg0) == BIT_AND_EXPR
9890 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9892 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9893 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9894 : TRUTH_XOR_EXPR,
9895 boolean_type_node,
9896 fold_convert_loc (loc, boolean_type_node, arg0),
9897 fold_convert_loc (loc, boolean_type_node, arg1));
9899 if (code == EQ_EXPR)
9900 tem = invert_truthvalue_loc (loc, tem);
9902 return fold_convert_loc (loc, type, tem);
9905 if (TREE_CODE_CLASS (code) == tcc_binary
9906 || TREE_CODE_CLASS (code) == tcc_comparison)
9908 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9910 tem = fold_build2_loc (loc, code, type,
9911 fold_convert_loc (loc, TREE_TYPE (op0),
9912 TREE_OPERAND (arg0, 1)), op1);
9913 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9914 tem);
9916 if (TREE_CODE (arg1) == COMPOUND_EXPR
9917 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9919 tem = fold_build2_loc (loc, code, type, op0,
9920 fold_convert_loc (loc, TREE_TYPE (op1),
9921 TREE_OPERAND (arg1, 1)));
9922 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9923 tem);
9926 if (TREE_CODE (arg0) == COND_EXPR
9927 || TREE_CODE (arg0) == VEC_COND_EXPR
9928 || COMPARISON_CLASS_P (arg0))
9930 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9931 arg0, arg1,
9932 /*cond_first_p=*/1);
9933 if (tem != NULL_TREE)
9934 return tem;
9937 if (TREE_CODE (arg1) == COND_EXPR
9938 || TREE_CODE (arg1) == VEC_COND_EXPR
9939 || COMPARISON_CLASS_P (arg1))
9941 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9942 arg1, arg0,
9943 /*cond_first_p=*/0);
9944 if (tem != NULL_TREE)
9945 return tem;
9949 switch (code)
9951 case MEM_REF:
9952 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9953 if (TREE_CODE (arg0) == ADDR_EXPR
9954 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9956 tree iref = TREE_OPERAND (arg0, 0);
9957 return fold_build2 (MEM_REF, type,
9958 TREE_OPERAND (iref, 0),
9959 int_const_binop (PLUS_EXPR, arg1,
9960 TREE_OPERAND (iref, 1)));
9963 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9964 if (TREE_CODE (arg0) == ADDR_EXPR
9965 && handled_component_p (TREE_OPERAND (arg0, 0)))
9967 tree base;
9968 HOST_WIDE_INT coffset;
9969 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9970 &coffset);
9971 if (!base)
9972 return NULL_TREE;
9973 return fold_build2 (MEM_REF, type,
9974 build_fold_addr_expr (base),
9975 int_const_binop (PLUS_EXPR, arg1,
9976 size_int (coffset)));
9979 return NULL_TREE;
9981 case POINTER_PLUS_EXPR:
9982 /* 0 +p index -> (type)index */
9983 if (integer_zerop (arg0))
9984 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9986 /* PTR +p 0 -> PTR */
9987 if (integer_zerop (arg1))
9988 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9990 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9991 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9992 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9993 return fold_convert_loc (loc, type,
9994 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9995 fold_convert_loc (loc, sizetype,
9996 arg1),
9997 fold_convert_loc (loc, sizetype,
9998 arg0)));
10000 /* (PTR +p B) +p A -> PTR +p (B + A) */
10001 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10003 tree inner;
10004 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10005 tree arg00 = TREE_OPERAND (arg0, 0);
10006 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10007 arg01, fold_convert_loc (loc, sizetype, arg1));
10008 return fold_convert_loc (loc, type,
10009 fold_build_pointer_plus_loc (loc,
10010 arg00, inner));
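/* Editorial example (illustrative): "(p p+ 8) p+ 4" re-associates to
   "p p+ 12" by adding the two offsets in sizetype.  */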
10013 /* PTR_CST +p CST -> CST1 */
10014 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10015 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10016 fold_convert_loc (loc, type, arg1));
10018 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10019 of the array. The loop optimizer sometimes produces this type of
10020 expression. */
10021 if (TREE_CODE (arg0) == ADDR_EXPR)
10023 tem = try_move_mult_to_index (loc, arg0,
10024 fold_convert_loc (loc,
10025 ssizetype, arg1));
10026 if (tem)
10027 return fold_convert_loc (loc, type, tem);
10030 return NULL_TREE;
10032 case PLUS_EXPR:
10033 /* A + (-B) -> A - B */
10034 if (TREE_CODE (arg1) == NEGATE_EXPR)
10035 return fold_build2_loc (loc, MINUS_EXPR, type,
10036 fold_convert_loc (loc, type, arg0),
10037 fold_convert_loc (loc, type,
10038 TREE_OPERAND (arg1, 0)));
10039 /* (-A) + B -> B - A */
10040 if (TREE_CODE (arg0) == NEGATE_EXPR
10041 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10042 return fold_build2_loc (loc, MINUS_EXPR, type,
10043 fold_convert_loc (loc, type, arg1),
10044 fold_convert_loc (loc, type,
10045 TREE_OPERAND (arg0, 0)));
10047 if (INTEGRAL_TYPE_P (type))
10049 /* Convert ~A + 1 to -A. */
10050 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10051 && integer_onep (arg1))
10052 return fold_build1_loc (loc, NEGATE_EXPR, type,
10053 fold_convert_loc (loc, type,
10054 TREE_OPERAND (arg0, 0)));
10056 /* ~X + X is -1. */
10057 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10058 && !TYPE_OVERFLOW_TRAPS (type))
10060 tree tem = TREE_OPERAND (arg0, 0);
10062 STRIP_NOPS (tem);
10063 if (operand_equal_p (tem, arg1, 0))
10065 t1 = build_int_cst_type (type, -1);
10066 return omit_one_operand_loc (loc, type, t1, arg1);
10070 /* X + ~X is -1. */
10071 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10072 && !TYPE_OVERFLOW_TRAPS (type))
10074 tree tem = TREE_OPERAND (arg1, 0);
10076 STRIP_NOPS (tem);
10077 if (operand_equal_p (arg0, tem, 0))
10079 t1 = build_int_cst_type (type, -1);
10080 return omit_one_operand_loc (loc, type, t1, arg0);
10084 /* X + (X / CST) * -CST is X % CST. */
10085 if (TREE_CODE (arg1) == MULT_EXPR
10086 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10087 && operand_equal_p (arg0,
10088 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10090 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10091 tree cst1 = TREE_OPERAND (arg1, 1);
10092 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10093 cst1, cst0);
10094 if (sum && integer_zerop (sum))
10095 return fold_convert_loc (loc, type,
10096 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10097 TREE_TYPE (arg0), arg0,
10098 cst0));
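/* Editorial example (illustrative): "x + (x / 16) * -16" matches with
   cst0 == 16 and cst1 == -16; their sum is zero, so the expression
   folds to the remainder "x % 16" (TRUNC_MOD_EXPR).  */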
10102 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10103 one. Make sure the type is not saturating and has the signedness of
10104 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10105 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10106 if ((TREE_CODE (arg0) == MULT_EXPR
10107 || TREE_CODE (arg1) == MULT_EXPR)
10108 && !TYPE_SATURATING (type)
10109 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10110 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10111 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10113 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10114 if (tem)
10115 return tem;
10118 if (! FLOAT_TYPE_P (type))
10120 if (integer_zerop (arg1))
10121 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10123 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10124 with a constant, and the two constants have no bits in common,
10125 we should treat this as a BIT_IOR_EXPR since this may produce more
10126 simplifications. */
10127 if (TREE_CODE (arg0) == BIT_AND_EXPR
10128 && TREE_CODE (arg1) == BIT_AND_EXPR
10129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10130 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10131 && integer_zerop (const_binop (BIT_AND_EXPR,
10132 TREE_OPERAND (arg0, 1),
10133 TREE_OPERAND (arg1, 1))))
10135 code = BIT_IOR_EXPR;
10136 goto bit_ior;
10139 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10140 (plus (plus (mult) (mult)) (foo)) so that we can
10141 take advantage of the factoring cases below. */
10142 if (TYPE_OVERFLOW_WRAPS (type)
10143 && (((TREE_CODE (arg0) == PLUS_EXPR
10144 || TREE_CODE (arg0) == MINUS_EXPR)
10145 && TREE_CODE (arg1) == MULT_EXPR)
10146 || ((TREE_CODE (arg1) == PLUS_EXPR
10147 || TREE_CODE (arg1) == MINUS_EXPR)
10148 && TREE_CODE (arg0) == MULT_EXPR)))
10150 tree parg0, parg1, parg, marg;
10151 enum tree_code pcode;
10153 if (TREE_CODE (arg1) == MULT_EXPR)
10154 parg = arg0, marg = arg1;
10155 else
10156 parg = arg1, marg = arg0;
10157 pcode = TREE_CODE (parg);
10158 parg0 = TREE_OPERAND (parg, 0);
10159 parg1 = TREE_OPERAND (parg, 1);
10160 STRIP_NOPS (parg0);
10161 STRIP_NOPS (parg1);
10163 if (TREE_CODE (parg0) == MULT_EXPR
10164 && TREE_CODE (parg1) != MULT_EXPR)
10165 return fold_build2_loc (loc, pcode, type,
10166 fold_build2_loc (loc, PLUS_EXPR, type,
10167 fold_convert_loc (loc, type,
10168 parg0),
10169 fold_convert_loc (loc, type,
10170 marg)),
10171 fold_convert_loc (loc, type, parg1));
10172 if (TREE_CODE (parg0) != MULT_EXPR
10173 && TREE_CODE (parg1) == MULT_EXPR)
10174 return
10175 fold_build2_loc (loc, PLUS_EXPR, type,
10176 fold_convert_loc (loc, type, parg0),
10177 fold_build2_loc (loc, pcode, type,
10178 fold_convert_loc (loc, type, marg),
10179 fold_convert_loc (loc, type,
10180 parg1)));
10183 else
10185 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10186 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10187 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10189 /* Likewise if the operands are reversed. */
10190 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10191 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10193 /* Convert X + -C into X - C. */
10194 if (TREE_CODE (arg1) == REAL_CST
10195 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10197 tem = fold_negate_const (arg1, type);
10198 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10199 return fold_build2_loc (loc, MINUS_EXPR, type,
10200 fold_convert_loc (loc, type, arg0),
10201 fold_convert_loc (loc, type, tem));
10204 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10205 to __complex__ ( x, y ). This is not the same for SNaNs or
10206 if signed zeros are involved. */
10207 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10208 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10209 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10211 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10212 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10213 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10214 bool arg0rz = false, arg0iz = false;
10215 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10216 || (arg0i && (arg0iz = real_zerop (arg0i))))
10218 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10219 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10220 if (arg0rz && arg1i && real_zerop (arg1i))
10222 tree rp = arg1r ? arg1r
10223 : build1 (REALPART_EXPR, rtype, arg1);
10224 tree ip = arg0i ? arg0i
10225 : build1 (IMAGPART_EXPR, rtype, arg0);
10226 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10228 else if (arg0iz && arg1r && real_zerop (arg1r))
10230 tree rp = arg0r ? arg0r
10231 : build1 (REALPART_EXPR, rtype, arg0);
10232 tree ip = arg1i ? arg1i
10233 : build1 (IMAGPART_EXPR, rtype, arg1);
10234 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10239 if (flag_unsafe_math_optimizations
10240 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10241 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10242 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10243 return tem;
10245 /* Convert x+x into x*2.0. */
10246 if (operand_equal_p (arg0, arg1, 0)
10247 && SCALAR_FLOAT_TYPE_P (type))
10248 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10249 build_real (type, dconst2));
10251 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10252 We associate floats only if the user has specified
10253 -fassociative-math. */
10254 if (flag_associative_math
10255 && TREE_CODE (arg1) == PLUS_EXPR
10256 && TREE_CODE (arg0) != MULT_EXPR)
10258 tree tree10 = TREE_OPERAND (arg1, 0);
10259 tree tree11 = TREE_OPERAND (arg1, 1);
10260 if (TREE_CODE (tree11) == MULT_EXPR
10261 && TREE_CODE (tree10) == MULT_EXPR)
10263 tree tree0;
10264 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10265 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10268 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10269 We associate floats only if the user has specified
10270 -fassociative-math. */
10271 if (flag_associative_math
10272 && TREE_CODE (arg0) == PLUS_EXPR
10273 && TREE_CODE (arg1) != MULT_EXPR)
10275 tree tree00 = TREE_OPERAND (arg0, 0);
10276 tree tree01 = TREE_OPERAND (arg0, 1);
10277 if (TREE_CODE (tree01) == MULT_EXPR
10278 && TREE_CODE (tree00) == MULT_EXPR)
10280 tree tree0;
10281 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10282 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10287 bit_rotate:
10288 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
10289 of A, is a rotate of A by C1 bits. */
10290 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
10291 of A, is a rotate of A by B bits. */
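/* Editorial example (illustrative): for 32-bit unsigned x,
   "(x << 3) + (x >> 29)" becomes a left-rotate of x by 3
   (LROTATE_EXPR), and "(x << n) + (x >> (32 - n))" likewise becomes
   a rotate by n.  */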
10293 enum tree_code code0, code1;
10294 tree rtype;
10295 code0 = TREE_CODE (arg0);
10296 code1 = TREE_CODE (arg1);
10297 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10298 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10299 && operand_equal_p (TREE_OPERAND (arg0, 0),
10300 TREE_OPERAND (arg1, 0), 0)
10301 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10302 TYPE_UNSIGNED (rtype))
10303 /* Only create rotates in complete modes. Other cases are not
10304 expanded properly. */
10305 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10307 tree tree01, tree11;
10308 enum tree_code code01, code11;
10310 tree01 = TREE_OPERAND (arg0, 1);
10311 tree11 = TREE_OPERAND (arg1, 1);
10312 STRIP_NOPS (tree01);
10313 STRIP_NOPS (tree11);
10314 code01 = TREE_CODE (tree01);
10315 code11 = TREE_CODE (tree11);
10316 if (code01 == INTEGER_CST
10317 && code11 == INTEGER_CST
10318 && TREE_INT_CST_HIGH (tree01) == 0
10319 && TREE_INT_CST_HIGH (tree11) == 0
10320 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10321 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10323 tem = build2_loc (loc, LROTATE_EXPR,
10324 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10325 TREE_OPERAND (arg0, 0),
10326 code0 == LSHIFT_EXPR ? tree01 : tree11);
10327 return fold_convert_loc (loc, type, tem);
10329 else if (code11 == MINUS_EXPR)
10331 tree tree110, tree111;
10332 tree110 = TREE_OPERAND (tree11, 0);
10333 tree111 = TREE_OPERAND (tree11, 1);
10334 STRIP_NOPS (tree110);
10335 STRIP_NOPS (tree111);
10336 if (TREE_CODE (tree110) == INTEGER_CST
10337 && 0 == compare_tree_int (tree110,
10338 TYPE_PRECISION
10339 (TREE_TYPE (TREE_OPERAND
10340 (arg0, 0))))
10341 && operand_equal_p (tree01, tree111, 0))
10342 return
10343 fold_convert_loc (loc, type,
10344 build2 ((code0 == LSHIFT_EXPR
10345 ? LROTATE_EXPR
10346 : RROTATE_EXPR),
10347 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10348 TREE_OPERAND (arg0, 0), tree01));
10350 else if (code01 == MINUS_EXPR)
10352 tree tree010, tree011;
10353 tree010 = TREE_OPERAND (tree01, 0);
10354 tree011 = TREE_OPERAND (tree01, 1);
10355 STRIP_NOPS (tree010);
10356 STRIP_NOPS (tree011);
10357 if (TREE_CODE (tree010) == INTEGER_CST
10358 && 0 == compare_tree_int (tree010,
10359 TYPE_PRECISION
10360 (TREE_TYPE (TREE_OPERAND
10361 (arg0, 0))))
10362 && operand_equal_p (tree11, tree011, 0))
10363 return fold_convert_loc
10364 (loc, type,
10365 build2 ((code0 != LSHIFT_EXPR
10366 ? LROTATE_EXPR
10367 : RROTATE_EXPR),
10368 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10369 TREE_OPERAND (arg0, 0), tree11));
10374 associate:
10375 /* In most languages, we can't associate operations on floats through
10376 parentheses. Rather than remember where the parentheses were, we
10377 don't associate floats at all, unless the user has specified
10378 -fassociative-math.
10379 And we need to make sure the type is not saturating. */
10381 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10382 && !TYPE_SATURATING (type))
10384 tree var0, con0, lit0, minus_lit0;
10385 tree var1, con1, lit1, minus_lit1;
10386 tree atype = type;
10387 bool ok = true;
10389 /* Split both trees into variables, constants, and literals. Then
10390 associate each group together, the constants with literals,
10391 then the result with variables. This increases the chances of
10392 literals being recombined later and of generating relocatable
10393 expressions for the sum of a constant and literal. */
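/* Editorial example (illustrative): for "(x + 3) + (y + 5)" the split
   yields variables x and y and literals 3 and 5; re-association gives
   "(x + y) + 8", combining the two literals into one.  */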
10394 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10395 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10396 code == MINUS_EXPR);
10398 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10399 if (code == MINUS_EXPR)
10400 code = PLUS_EXPR;
10402 /* With undefined overflow prefer doing association in a type
10403 which wraps on overflow, if that is one of the operand types. */
10404 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10405 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10407 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10408 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10409 atype = TREE_TYPE (arg0);
10410 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10411 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10412 atype = TREE_TYPE (arg1);
10413 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10416 /* With undefined overflow we can only associate constants with one
10417 variable, and constants whose association doesn't overflow. */
10418 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10419 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10421 if (var0 && var1)
10423 tree tmp0 = var0;
10424 tree tmp1 = var1;
10426 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10427 tmp0 = TREE_OPERAND (tmp0, 0);
10428 if (CONVERT_EXPR_P (tmp0)
10429 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10430 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10431 <= TYPE_PRECISION (atype)))
10432 tmp0 = TREE_OPERAND (tmp0, 0);
10433 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10434 tmp1 = TREE_OPERAND (tmp1, 0);
10435 if (CONVERT_EXPR_P (tmp1)
10436 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10437 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10438 <= TYPE_PRECISION (atype)))
10439 tmp1 = TREE_OPERAND (tmp1, 0);
10440 /* The only case we can still associate with two variables
10441 is if they are the same, modulo negation and bit-pattern
10442 preserving conversions. */
10443 if (!operand_equal_p (tmp0, tmp1, 0))
10444 ok = false;
10448 /* Only do something if we found more than two objects. Otherwise,
10449 nothing has changed and we risk infinite recursion. */
10450 if (ok
10451 && (2 < ((var0 != 0) + (var1 != 0)
10452 + (con0 != 0) + (con1 != 0)
10453 + (lit0 != 0) + (lit1 != 0)
10454 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10456 bool any_overflows = false;
10457 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10458 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10459 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10460 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10461 var0 = associate_trees (loc, var0, var1, code, atype);
10462 con0 = associate_trees (loc, con0, con1, code, atype);
10463 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10464 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10465 code, atype);
10467 /* Preserve the MINUS_EXPR if the negative part of the literal is
10468 greater than the positive part. Otherwise, the multiplicative
10469 folding code (i.e. extract_muldiv) may be fooled in case
10470 unsigned constants are subtracted, like in the following
10471 example: ((X*2 + 4) - 8U)/2. */
10472 if (minus_lit0 && lit0)
10474 if (TREE_CODE (lit0) == INTEGER_CST
10475 && TREE_CODE (minus_lit0) == INTEGER_CST
10476 && tree_int_cst_lt (lit0, minus_lit0))
10478 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10479 MINUS_EXPR, atype);
10480 lit0 = 0;
10482 else
10484 lit0 = associate_trees (loc, lit0, minus_lit0,
10485 MINUS_EXPR, atype);
10486 minus_lit0 = 0;
10490 /* Don't introduce overflows through reassociation. */
10491 if (!any_overflows
10492 && ((lit0 && TREE_OVERFLOW (lit0))
10493 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10494 return NULL_TREE;
10496 if (minus_lit0)
10498 if (con0 == 0)
10499 return
10500 fold_convert_loc (loc, type,
10501 associate_trees (loc, var0, minus_lit0,
10502 MINUS_EXPR, atype));
10503 else
10505 con0 = associate_trees (loc, con0, minus_lit0,
10506 MINUS_EXPR, atype);
10507 return
10508 fold_convert_loc (loc, type,
10509 associate_trees (loc, var0, con0,
10510 PLUS_EXPR, atype));
10514 con0 = associate_trees (loc, con0, lit0, code, atype);
10515 return
10516 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10517 code, atype));
10521 return NULL_TREE;
10523 case MINUS_EXPR:
10524 /* Pointer simplifications for subtraction, simple reassociations. */
10525 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10527 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10528 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10529 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10531 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10532 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10533 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10534 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10535 return fold_build2_loc (loc, PLUS_EXPR, type,
10536 fold_build2_loc (loc, MINUS_EXPR, type,
10537 arg00, arg10),
10538 fold_build2_loc (loc, MINUS_EXPR, type,
10539 arg01, arg11));
10541 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10542 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10544 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10545 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10546 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10547 fold_convert_loc (loc, type, arg1));
10548 if (tmp)
10549 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10552 /* A - (-B) -> A + B */
10553 if (TREE_CODE (arg1) == NEGATE_EXPR)
10554 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10555 fold_convert_loc (loc, type,
10556 TREE_OPERAND (arg1, 0)));
10557 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10558 if (TREE_CODE (arg0) == NEGATE_EXPR
10559 && (FLOAT_TYPE_P (type)
10560 || INTEGRAL_TYPE_P (type))
10561 && negate_expr_p (arg1)
10562 && reorder_operands_p (arg0, arg1))
10563 return fold_build2_loc (loc, MINUS_EXPR, type,
10564 fold_convert_loc (loc, type,
10565 negate_expr (arg1)),
10566 fold_convert_loc (loc, type,
10567 TREE_OPERAND (arg0, 0)));
10568 /* Convert -A - 1 to ~A. */
10569 if (INTEGRAL_TYPE_P (type)
10570 && TREE_CODE (arg0) == NEGATE_EXPR
10571 && integer_onep (arg1)
10572 && !TYPE_OVERFLOW_TRAPS (type))
10573 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10574 fold_convert_loc (loc, type,
10575 TREE_OPERAND (arg0, 0)));
10577 /* Convert -1 - A to ~A. */
10578 if (INTEGRAL_TYPE_P (type)
10579 && integer_all_onesp (arg0))
10580 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10583 /* X - (X / CST) * CST is X % CST. */
10584 if (INTEGRAL_TYPE_P (type)
10585 && TREE_CODE (arg1) == MULT_EXPR
10586 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10587 && operand_equal_p (arg0,
10588 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10589 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10590 TREE_OPERAND (arg1, 1), 0))
10591 return
10592 fold_convert_loc (loc, type,
10593 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10594 arg0, TREE_OPERAND (arg1, 1)));
10596 if (! FLOAT_TYPE_P (type))
10598 if (integer_zerop (arg0))
10599 return negate_expr (fold_convert_loc (loc, type, arg1));
10600 if (integer_zerop (arg1))
10601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10603 /* Fold A - (A & B) into ~B & A. */
10604 if (!TREE_SIDE_EFFECTS (arg0)
10605 && TREE_CODE (arg1) == BIT_AND_EXPR)
10607 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10609 tree arg10 = fold_convert_loc (loc, type,
10610 TREE_OPERAND (arg1, 0));
10611 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10612 fold_build1_loc (loc, BIT_NOT_EXPR,
10613 type, arg10),
10614 fold_convert_loc (loc, type, arg0));
10616 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10618 tree arg11 = fold_convert_loc (loc,
10619 type, TREE_OPERAND (arg1, 1));
10620 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10621 fold_build1_loc (loc, BIT_NOT_EXPR,
10622 type, arg11),
10623 fold_convert_loc (loc, type, arg0));
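/* Editorial example (illustrative): with a == 0b1100 and b == 0b1010,
   a - (a & b) == 12 - 8 == 4 and (~b & a) == 0b0100 == 4; the rewrite
   exposes a BIT_AND that later folds may simplify further.  */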
10627 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10628 any power of 2 minus 1. */
10629 if (TREE_CODE (arg0) == BIT_AND_EXPR
10630 && TREE_CODE (arg1) == BIT_AND_EXPR
10631 && operand_equal_p (TREE_OPERAND (arg0, 0),
10632 TREE_OPERAND (arg1, 0), 0))
10634 tree mask0 = TREE_OPERAND (arg0, 1);
10635 tree mask1 = TREE_OPERAND (arg1, 1);
10636 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10638 if (operand_equal_p (tem, mask1, 0))
10640 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10641 TREE_OPERAND (arg0, 0), mask1);
10642 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
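/* Editorial example (illustrative): with B == 7 (a power of 2 minus
   1) and A == 12, (A & ~B) - (A & B) == 8 - 4 == 4 and (A ^ B) - B
   == 11 - 7 == 4, so the two forms agree.  */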
10647 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10648 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10649 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10651 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10652 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10653 (-ARG1 + ARG0) reduces to -ARG1. */
10654 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10655 return negate_expr (fold_convert_loc (loc, type, arg1));
10657 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10658 __complex__ ( x, -y ). This is not the same for SNaNs or if
10659 signed zeros are involved. */
10660 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10661 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10662 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10664 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10665 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10666 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10667 bool arg0rz = false, arg0iz = false;
10668 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10669 || (arg0i && (arg0iz = real_zerop (arg0i))))
10671 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10672 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10673 if (arg0rz && arg1i && real_zerop (arg1i))
10675 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10676 arg1r ? arg1r
10677 : build1 (REALPART_EXPR, rtype, arg1));
10678 tree ip = arg0i ? arg0i
10679 : build1 (IMAGPART_EXPR, rtype, arg0);
10680 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10682 else if (arg0iz && arg1r && real_zerop (arg1r))
10684 tree rp = arg0r ? arg0r
10685 : build1 (REALPART_EXPR, rtype, arg0);
10686 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10687 arg1i ? arg1i
10688 : build1 (IMAGPART_EXPR, rtype, arg1));
10689 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10694 /* Fold &x - &x. This can happen from &x.foo - &x.
10695 This is unsafe for certain floats even in non-IEEE formats.
10696                    In IEEE, it is unsafe because it gives the wrong result for NaNs.
10697 Also note that operand_equal_p is always false if an operand
10698 is volatile. */
10700 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10701 && operand_equal_p (arg0, arg1, 0))
10702 return build_zero_cst (type);
10704 /* A - B -> A + (-B) if B is easily negatable. */
10705 if (negate_expr_p (arg1)
10706 && ((FLOAT_TYPE_P (type)
10707 /* Avoid this transformation if B is a positive REAL_CST. */
10708 && (TREE_CODE (arg1) != REAL_CST
10709 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10710 || INTEGRAL_TYPE_P (type)))
10711 return fold_build2_loc (loc, PLUS_EXPR, type,
10712 fold_convert_loc (loc, type, arg0),
10713 fold_convert_loc (loc, type,
10714 negate_expr (arg1)));
10716 /* Try folding difference of addresses. */
10718 HOST_WIDE_INT diff;
10720 if ((TREE_CODE (arg0) == ADDR_EXPR
10721 || TREE_CODE (arg1) == ADDR_EXPR)
10722 && ptr_difference_const (arg0, arg1, &diff))
10723 return build_int_cst_type (type, diff);
10726 /* Fold &a[i] - &a[j] to i-j. */
10727 if (TREE_CODE (arg0) == ADDR_EXPR
10728 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10729 && TREE_CODE (arg1) == ADDR_EXPR
10730 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10732 tree tem = fold_addr_of_array_ref_difference (loc, type,
10733 TREE_OPERAND (arg0, 0),
10734 TREE_OPERAND (arg1, 0));
10735 if (tem)
10736 return tem;
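/* Illustrative sketch, assuming "int a[16];" and in-range indices
   i and j:  &a[i] - &a[j]  ==>  i - j, because pointer subtraction
   already divides the byte offset by sizeof (int). */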
10739 if (FLOAT_TYPE_P (type)
10740 && flag_unsafe_math_optimizations
10741 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10742 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10743 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10744 return tem;
10746 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10747 one. Make sure the type is not saturating and has the signedness of
10748 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10749 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10750 if ((TREE_CODE (arg0) == MULT_EXPR
10751 || TREE_CODE (arg1) == MULT_EXPR)
10752 && !TYPE_SATURATING (type)
10753 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10754 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10755 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10757 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10758 if (tem)
10759 return tem;
10762 goto associate;
10764 case MULT_EXPR:
10765 /* (-A) * (-B) -> A * B */
10766 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10767 return fold_build2_loc (loc, MULT_EXPR, type,
10768 fold_convert_loc (loc, type,
10769 TREE_OPERAND (arg0, 0)),
10770 fold_convert_loc (loc, type,
10771 negate_expr (arg1)));
10772 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10773 return fold_build2_loc (loc, MULT_EXPR, type,
10774 fold_convert_loc (loc, type,
10775 negate_expr (arg0)),
10776 fold_convert_loc (loc, type,
10777 TREE_OPERAND (arg1, 0)));
10779 if (! FLOAT_TYPE_P (type))
10781 if (integer_zerop (arg1))
10782 return omit_one_operand_loc (loc, type, arg1, arg0);
10783 if (integer_onep (arg1))
10784 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10785 /* Transform x * -1 into -x. Make sure to do the negation
10786 on the original operand with conversions not stripped
10787 because we can only strip non-sign-changing conversions. */
10788 if (integer_all_onesp (arg1))
10789 return fold_convert_loc (loc, type, negate_expr (op0));
10790 /* Transform x * -C into -x * C if x is easily negatable. */
10791 if (TREE_CODE (arg1) == INTEGER_CST
10792 && tree_int_cst_sgn (arg1) == -1
10793 && negate_expr_p (arg0)
10794 && (tem = negate_expr (arg1)) != arg1
10795 && !TREE_OVERFLOW (tem))
10796 return fold_build2_loc (loc, MULT_EXPR, type,
10797 fold_convert_loc (loc, type,
10798 negate_expr (arg0)),
10799 tem);
10801 /* (a * (1 << b)) is (a << b) */
10802 if (TREE_CODE (arg1) == LSHIFT_EXPR
10803 && integer_onep (TREE_OPERAND (arg1, 0)))
10804 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10805 TREE_OPERAND (arg1, 1));
10806 if (TREE_CODE (arg0) == LSHIFT_EXPR
10807 && integer_onep (TREE_OPERAND (arg0, 0)))
10808 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10809 TREE_OPERAND (arg0, 1));
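/* Worked instance of the two shift folds above, assuming b == 3:
   a * (1 << 3) == a * 8 == a << 3, and likewise (1 << 3) * a == a << 3,
   since multiplying by 2**b is a left shift by b bits. */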
10811 /* (A + A) * C -> A * 2 * C */
10812 if (TREE_CODE (arg0) == PLUS_EXPR
10813 && TREE_CODE (arg1) == INTEGER_CST
10814 && operand_equal_p (TREE_OPERAND (arg0, 0),
10815 TREE_OPERAND (arg0, 1), 0))
10816 return fold_build2_loc (loc, MULT_EXPR, type,
10817 omit_one_operand_loc (loc, type,
10818 TREE_OPERAND (arg0, 0),
10819 TREE_OPERAND (arg0, 1)),
10820 fold_build2_loc (loc, MULT_EXPR, type,
10821                                                     build_int_cst (type, 2), arg1));
10823 strict_overflow_p = false;
10824 if (TREE_CODE (arg1) == INTEGER_CST
10825 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10826 &strict_overflow_p)))
10828 if (strict_overflow_p)
10829 fold_overflow_warning (("assuming signed overflow does not "
10830 "occur when simplifying "
10831 "multiplication"),
10832 WARN_STRICT_OVERFLOW_MISC);
10833 return fold_convert_loc (loc, type, tem);
10836 /* Optimize z * conj(z) for integer complex numbers. */
10837 if (TREE_CODE (arg0) == CONJ_EXPR
10838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10839 return fold_mult_zconjz (loc, type, arg1);
10840 if (TREE_CODE (arg1) == CONJ_EXPR
10841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10842 return fold_mult_zconjz (loc, type, arg0);
10844 else
10846 /* Maybe fold x * 0 to 0. The expressions aren't the same
10847 when x is NaN, since x * 0 is also NaN. Nor are they the
10848 same in modes with signed zeros, since multiplying a
10849 negative value by 0 gives -0, not +0. */
10850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10851 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10852 && real_zerop (arg1))
10853 return omit_one_operand_loc (loc, type, arg1, arg0);
10854 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10855 Likewise for complex arithmetic with signed zeros. */
10856 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10857 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10858 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10859 && real_onep (arg1))
10860 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10862 /* Transform x * -1.0 into -x. */
10863 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10864 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10865 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10866 && real_minus_onep (arg1))
10867 return fold_convert_loc (loc, type, negate_expr (arg0));
10869 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10870            the result for floating point types due to rounding, so it is applied
10871            only if -fassociative-math was specified.  */
10872 if (flag_associative_math
10873 && TREE_CODE (arg0) == RDIV_EXPR
10874 && TREE_CODE (arg1) == REAL_CST
10875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10877 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10878 arg1);
10879 if (tem)
10880 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10881 TREE_OPERAND (arg0, 1));
10884 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10885 if (operand_equal_p (arg0, arg1, 0))
10887 tree tem = fold_strip_sign_ops (arg0);
10888 if (tem != NULL_TREE)
10890 tem = fold_convert_loc (loc, type, tem);
10891 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10895 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10896 This is not the same for NaNs or if signed zeros are
10897 involved. */
10898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10899 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10900 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10901 && TREE_CODE (arg1) == COMPLEX_CST
10902 && real_zerop (TREE_REALPART (arg1)))
10904 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10905 if (real_onep (TREE_IMAGPART (arg1)))
10906 return
10907 fold_build2_loc (loc, COMPLEX_EXPR, type,
10908 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10909 rtype, arg0)),
10910 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10911 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10912 return
10913 fold_build2_loc (loc, COMPLEX_EXPR, type,
10914 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10915 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10916 rtype, arg0)));
10919 /* Optimize z * conj(z) for floating point complex numbers.
10920 Guarded by flag_unsafe_math_optimizations as non-finite
10921 imaginary components don't produce scalar results. */
10922 if (flag_unsafe_math_optimizations
10923 && TREE_CODE (arg0) == CONJ_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10925 return fold_mult_zconjz (loc, type, arg1);
10926 if (flag_unsafe_math_optimizations
10927 && TREE_CODE (arg1) == CONJ_EXPR
10928 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10929 return fold_mult_zconjz (loc, type, arg0);
10931 if (flag_unsafe_math_optimizations)
10933 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10934 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10936 /* Optimizations of root(...)*root(...). */
10937 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10939 tree rootfn, arg;
10940 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10941 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10943 /* Optimize sqrt(x)*sqrt(x) as x. */
10944 if (BUILTIN_SQRT_P (fcode0)
10945 && operand_equal_p (arg00, arg10, 0)
10946 && ! HONOR_SNANS (TYPE_MODE (type)))
10947 return arg00;
10949 /* Optimize root(x)*root(y) as root(x*y). */
10950 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10951 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10952 return build_call_expr_loc (loc, rootfn, 1, arg);
10955 /* Optimize expN(x)*expN(y) as expN(x+y). */
10956 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10958 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10959 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10960 CALL_EXPR_ARG (arg0, 0),
10961 CALL_EXPR_ARG (arg1, 0));
10962 return build_call_expr_loc (loc, expfn, 1, arg);
10965 /* Optimizations of pow(...)*pow(...). */
10966 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10967 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10968 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10970 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10971 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10972 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10973 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10975 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10976 if (operand_equal_p (arg01, arg11, 0))
10978 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10979 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10980 arg00, arg10);
10981 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10984 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10985 if (operand_equal_p (arg00, arg10, 0))
10987 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10988 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10989 arg01, arg11);
10990 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
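/* Illustrative sketch of the two pow folds above, which are only done
   under -funsafe-math-optimizations:
     pow (x, y) * pow (z, y)  ==>  pow (x * z, y)
     pow (x, y) * pow (x, z)  ==>  pow (x, y + z)
   Neither is exact in general floating point; rounding may differ. */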
10994 /* Optimize tan(x)*cos(x) as sin(x). */
10995 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10996 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10997 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10998 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10999 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11000 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11001 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11002 CALL_EXPR_ARG (arg1, 0), 0))
11004 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11006 if (sinfn != NULL_TREE)
11007 return build_call_expr_loc (loc, sinfn, 1,
11008 CALL_EXPR_ARG (arg0, 0));
11011 /* Optimize x*pow(x,c) as pow(x,c+1). */
11012 if (fcode1 == BUILT_IN_POW
11013 || fcode1 == BUILT_IN_POWF
11014 || fcode1 == BUILT_IN_POWL)
11016 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11017 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11018 if (TREE_CODE (arg11) == REAL_CST
11019 && !TREE_OVERFLOW (arg11)
11020 && operand_equal_p (arg0, arg10, 0))
11022 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11023 REAL_VALUE_TYPE c;
11024 tree arg;
11026 c = TREE_REAL_CST (arg11);
11027 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11028 arg = build_real (type, c);
11029 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11033 /* Optimize pow(x,c)*x as pow(x,c+1). */
11034 if (fcode0 == BUILT_IN_POW
11035 || fcode0 == BUILT_IN_POWF
11036 || fcode0 == BUILT_IN_POWL)
11038 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11039 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11040 if (TREE_CODE (arg01) == REAL_CST
11041 && !TREE_OVERFLOW (arg01)
11042 && operand_equal_p (arg1, arg00, 0))
11044 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11045 REAL_VALUE_TYPE c;
11046 tree arg;
11048 c = TREE_REAL_CST (arg01);
11049 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11050 arg = build_real (type, c);
11051 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11055 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11056 if (!in_gimple_form
11057 && optimize
11058 && operand_equal_p (arg0, arg1, 0))
11060 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11062 if (powfn)
11064 tree arg = build_real (type, dconst2);
11065 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11070 goto associate;
11072 case BIT_IOR_EXPR:
11073 bit_ior:
11074 if (integer_all_onesp (arg1))
11075 return omit_one_operand_loc (loc, type, arg1, arg0);
11076 if (integer_zerop (arg1))
11077 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11078 if (operand_equal_p (arg0, arg1, 0))
11079 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11081 /* ~X | X is -1. */
11082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11083 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11085 t1 = build_zero_cst (type);
11086 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11087 return omit_one_operand_loc (loc, type, t1, arg1);
11090 /* X | ~X is -1. */
11091 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11092 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11094 t1 = build_zero_cst (type);
11095 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11096 return omit_one_operand_loc (loc, type, t1, arg0);
11099 /* Canonicalize (X & C1) | C2. */
11100 if (TREE_CODE (arg0) == BIT_AND_EXPR
11101 && TREE_CODE (arg1) == INTEGER_CST
11102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11104 double_int c1, c2, c3, msk;
11105 int width = TYPE_PRECISION (type), w;
11106 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11107 c2 = tree_to_double_int (arg1);
11109 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11110 if ((c1 & c2) == c1)
11111 return omit_one_operand_loc (loc, type, arg1,
11112 TREE_OPERAND (arg0, 0));
11114 msk = double_int::mask (width);
11116 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11117 if (msk.and_not (c1 | c2).is_zero ())
11118 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11119 TREE_OPERAND (arg0, 0), arg1);
11121 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11122 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11123 mode which allows further optimizations. */
11124 c1 &= msk;
11125 c2 &= msk;
11126 c3 = c1.and_not (c2);
11127 for (w = BITS_PER_UNIT;
11128 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11129 w <<= 1)
11131 unsigned HOST_WIDE_INT mask
11132 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11133 if (((c1.low | c2.low) & mask) == mask
11134 && (c1.low & ~mask) == 0 && c1.high == 0)
11136 c3 = double_int::from_uhwi (mask);
11137 break;
11140 if (c3 != c1)
11141 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11142 fold_build2_loc (loc, BIT_AND_EXPR, type,
11143 TREE_OPERAND (arg0, 0),
11144 double_int_to_tree (type,
11145 c3)),
11146 arg1);
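/* Worked instances of the (X & C1) | C2 canonicalization above,
   with constants chosen for illustration and a 32-bit X:
     (X & 0x0F) | 0xFF    ==>  0xFF                  ((C1 & C2) == C1)
     (X & 0x30F3) | 0x0F  ==>  (X & 0x30F0) | 0x0F   (C1 := C1 & ~C2)
   In the second case no byte-sized mode mask covers C1 | C2, so the
   minimized mask 0x30F0 is kept. */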
11149 /* (X & Y) | Y is (X, Y). */
11150 if (TREE_CODE (arg0) == BIT_AND_EXPR
11151 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11152 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11153 /* (X & Y) | X is (Y, X). */
11154 if (TREE_CODE (arg0) == BIT_AND_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11156 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11157 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11158 /* X | (X & Y) is (Y, X). */
11159 if (TREE_CODE (arg1) == BIT_AND_EXPR
11160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11161 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11162 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11163 /* X | (Y & X) is (Y, X). */
11164 if (TREE_CODE (arg1) == BIT_AND_EXPR
11165 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11166 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11167 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11169 /* (X & ~Y) | (~X & Y) is X ^ Y */
11170 if (TREE_CODE (arg0) == BIT_AND_EXPR
11171 && TREE_CODE (arg1) == BIT_AND_EXPR)
11173 tree a0, a1, l0, l1, n0, n1;
11175 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11176 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11178 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11179 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11181 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11182 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11184 if ((operand_equal_p (n0, a0, 0)
11185 && operand_equal_p (n1, a1, 0))
11186 || (operand_equal_p (n0, a1, 0)
11187 && operand_equal_p (n1, a0, 0)))
11188 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
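/* Bitwise truth-table sketch of the identity used above: for each bit,
   (X & ~Y) | (~X & Y) is 1 exactly when the bits of X and Y differ,
   which is the definition of X ^ Y. */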
11191 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11192 if (t1 != NULL_TREE)
11193 return t1;
11195 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11197 This results in more efficient code for machines without a NAND
11198 instruction. Combine will canonicalize to the first form
11199 which will allow use of NAND instructions provided by the
11200 backend if they exist. */
11201 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11202 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11204 return
11205 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11206 build2 (BIT_AND_EXPR, type,
11207 fold_convert_loc (loc, type,
11208 TREE_OPERAND (arg0, 0)),
11209 fold_convert_loc (loc, type,
11210 TREE_OPERAND (arg1, 0))));
11213 /* See if this can be simplified into a rotate first. If that
11214          is unsuccessful, continue in the association code.  */
11215 goto bit_rotate;
11217 case BIT_XOR_EXPR:
11218 if (integer_zerop (arg1))
11219 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11220 if (integer_all_onesp (arg1))
11221 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11222 if (operand_equal_p (arg0, arg1, 0))
11223 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11225 /* ~X ^ X is -1. */
11226 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11229 t1 = build_zero_cst (type);
11230 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11231 return omit_one_operand_loc (loc, type, t1, arg1);
11234 /* X ^ ~X is -1. */
11235 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11236 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11238 t1 = build_zero_cst (type);
11239 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11240 return omit_one_operand_loc (loc, type, t1, arg0);
11243 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11244 with a constant, and the two constants have no bits in common,
11245 we should treat this as a BIT_IOR_EXPR since this may produce more
11246 simplifications. */
11247 if (TREE_CODE (arg0) == BIT_AND_EXPR
11248 && TREE_CODE (arg1) == BIT_AND_EXPR
11249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11250 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11251 && integer_zerop (const_binop (BIT_AND_EXPR,
11252 TREE_OPERAND (arg0, 1),
11253 TREE_OPERAND (arg1, 1))))
11255 code = BIT_IOR_EXPR;
11256 goto bit_ior;
11259       /* (X | Y) ^ X -> Y & ~X */
11260 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11263 tree t2 = TREE_OPERAND (arg0, 1);
11264 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11265 arg1);
11266 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11267 fold_convert_loc (loc, type, t2),
11268 fold_convert_loc (loc, type, t1));
11269 return t1;
11272       /* (Y | X) ^ X -> Y & ~X */
11273 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11274 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11276 tree t2 = TREE_OPERAND (arg0, 0);
11277 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11278 arg1);
11279 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11280 fold_convert_loc (loc, type, t2),
11281 fold_convert_loc (loc, type, t1));
11282 return t1;
11285       /* X ^ (X | Y) -> Y & ~X */
11286 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11287 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11289 tree t2 = TREE_OPERAND (arg1, 1);
11290 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11291 arg0);
11292 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11293 fold_convert_loc (loc, type, t2),
11294 fold_convert_loc (loc, type, t1));
11295 return t1;
11298       /* X ^ (Y | X) -> Y & ~X */
11299 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11300 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11302 tree t2 = TREE_OPERAND (arg1, 0);
11303 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11304 arg0);
11305 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_convert_loc (loc, type, t2),
11307 fold_convert_loc (loc, type, t1));
11308 return t1;
11311 /* Convert ~X ^ ~Y to X ^ Y. */
11312 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11313 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11314 return fold_build2_loc (loc, code, type,
11315 fold_convert_loc (loc, type,
11316 TREE_OPERAND (arg0, 0)),
11317 fold_convert_loc (loc, type,
11318 TREE_OPERAND (arg1, 0)));
11320 /* Convert ~X ^ C to X ^ ~C. */
11321 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11322 && TREE_CODE (arg1) == INTEGER_CST)
11323 return fold_build2_loc (loc, code, type,
11324 fold_convert_loc (loc, type,
11325 TREE_OPERAND (arg0, 0)),
11326 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11328 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11329 if (TREE_CODE (arg0) == BIT_AND_EXPR
11330 && integer_onep (TREE_OPERAND (arg0, 1))
11331 && integer_onep (arg1))
11332 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11333 build_zero_cst (TREE_TYPE (arg0)));
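/* Worked instance of the fold above, for any integer X:
   (X & 1) ^ 1  ==>  (X & 1) == 0.  Both sides are 1 when X is even
   and 0 when X is odd. */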
11335 /* Fold (X & Y) ^ Y as ~X & Y. */
11336 if (TREE_CODE (arg0) == BIT_AND_EXPR
11337 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11339 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11340 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11341 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11342 fold_convert_loc (loc, type, arg1));
11344 /* Fold (X & Y) ^ X as ~Y & X. */
11345 if (TREE_CODE (arg0) == BIT_AND_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11347 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11349 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11350 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11351 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11352 fold_convert_loc (loc, type, arg1));
11354 /* Fold X ^ (X & Y) as X & ~Y. */
11355 if (TREE_CODE (arg1) == BIT_AND_EXPR
11356 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11358 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11359 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11360 fold_convert_loc (loc, type, arg0),
11361 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11363 /* Fold X ^ (Y & X) as ~Y & X. */
11364 if (TREE_CODE (arg1) == BIT_AND_EXPR
11365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11366 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11368 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11369 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11370 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11371 fold_convert_loc (loc, type, arg0));
11374 /* See if this can be simplified into a rotate first. If that
11375          is unsuccessful, continue in the association code.  */
11376 goto bit_rotate;
11378 case BIT_AND_EXPR:
11379 if (integer_all_onesp (arg1))
11380 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11381 if (integer_zerop (arg1))
11382 return omit_one_operand_loc (loc, type, arg1, arg0);
11383 if (operand_equal_p (arg0, arg1, 0))
11384 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11386 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11387 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11388 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11389 || (TREE_CODE (arg0) == EQ_EXPR
11390 && integer_zerop (TREE_OPERAND (arg0, 1))))
11391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11392 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11394 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11395 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11396 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11397 || (TREE_CODE (arg1) == EQ_EXPR
11398 && integer_zerop (TREE_OPERAND (arg1, 1))))
11399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11402 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11403 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11404 && TREE_CODE (arg1) == INTEGER_CST
11405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11407 tree tmp1 = fold_convert_loc (loc, type, arg1);
11408 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11409 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11410 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11411 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11412 return
11413 fold_convert_loc (loc, type,
11414 fold_build2_loc (loc, BIT_IOR_EXPR,
11415 type, tmp2, tmp3));
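/* Worked instance of the canonicalization above, with constants chosen
   for illustration:  (X | 0x0F) & 0xFC  ==>  (X & 0xFC) | 0x0C,
   by the distributive law, since 0x0F & 0xFC == 0x0C. */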
11418 /* (X | Y) & Y is (X, Y). */
11419 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11420 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11421 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11422 /* (X | Y) & X is (Y, X). */
11423 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11425 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11426 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11427 /* X & (X | Y) is (Y, X). */
11428 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11430 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11431 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11432 /* X & (Y | X) is (Y, X). */
11433 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11434 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11435 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11436 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11438 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11439 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11440 && integer_onep (TREE_OPERAND (arg0, 1))
11441 && integer_onep (arg1))
11443 tree tem2;
11444 tem = TREE_OPERAND (arg0, 0);
11445 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11446 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11447 tem, tem2);
11448 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11449 build_zero_cst (TREE_TYPE (tem)));
11451 /* Fold ~X & 1 as (X & 1) == 0. */
11452 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11453 && integer_onep (arg1))
11455 tree tem2;
11456 tem = TREE_OPERAND (arg0, 0);
11457 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11458 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11459 tem, tem2);
11460 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11461 build_zero_cst (TREE_TYPE (tem)));
11463 /* Fold !X & 1 as X == 0. */
11464 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11465 && integer_onep (arg1))
11467 tem = TREE_OPERAND (arg0, 0);
11468 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11469 build_zero_cst (TREE_TYPE (tem)));
11472 /* Fold (X ^ Y) & Y as ~X & Y. */
11473 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11474 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11476 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11477 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11478 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11479 fold_convert_loc (loc, type, arg1));
11481 /* Fold (X ^ Y) & X as ~Y & X. */
11482 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11483 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11484 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11486 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11487 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11488 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11489 fold_convert_loc (loc, type, arg1));
11491 /* Fold X & (X ^ Y) as X & ~Y. */
11492 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11493 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11495 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11496 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11497 fold_convert_loc (loc, type, arg0),
11498 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11500 /* Fold X & (Y ^ X) as ~Y & X. */
11501 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11502 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11503 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11505 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11506 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11507 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11508 fold_convert_loc (loc, type, arg0));
11511 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11512 multiple of 1 << CST. */
11513 if (TREE_CODE (arg1) == INTEGER_CST)
11515 double_int cst1 = tree_to_double_int (arg1);
11516 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11517 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11518 if ((cst1 & ncst1) == ncst1
11519 && multiple_of_p (type, arg0,
11520 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11521 return fold_convert_loc (loc, type, arg0);
11524 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11525 bits from CST2. */
11526 if (TREE_CODE (arg1) == INTEGER_CST
11527 && TREE_CODE (arg0) == MULT_EXPR
11528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11530 int arg1tz
11531 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11532 if (arg1tz > 0)
11534 double_int arg1mask, masked;
11535 arg1mask = ~double_int::mask (arg1tz);
11536 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11537 TYPE_UNSIGNED (type));
11538 masked = arg1mask & tree_to_double_int (arg1);
11539 if (masked.is_zero ())
11540 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11541 arg0, arg1);
11542 else if (masked != tree_to_double_int (arg1))
11543 return fold_build2_loc (loc, code, type, op0,
11544 double_int_to_tree (type, masked));
11548 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11549 ((A & N) + B) & M -> (A + B) & M
11550 Similarly if (N & M) == 0,
11551 ((A | N) + B) & M -> (A + B) & M
11552 and for - instead of + (or unary - instead of +)
11553 and/or ^ instead of |.
11554 If B is constant and (B & M) == 0, fold into A & M. */
11555 if (host_integerp (arg1, 1))
11557 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11558 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11559 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11560 && (TREE_CODE (arg0) == PLUS_EXPR
11561 || TREE_CODE (arg0) == MINUS_EXPR
11562 || TREE_CODE (arg0) == NEGATE_EXPR)
11563 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11564 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11566 tree pmop[2];
11567 int which = 0;
11568 unsigned HOST_WIDE_INT cst0;
11570 /* Now we know that arg0 is (C + D) or (C - D) or
11571 -C and arg1 (M) is == (1LL << cst) - 1.
11572 Store C into PMOP[0] and D into PMOP[1]. */
11573 pmop[0] = TREE_OPERAND (arg0, 0);
11574 pmop[1] = NULL;
11575 if (TREE_CODE (arg0) != NEGATE_EXPR)
11577 pmop[1] = TREE_OPERAND (arg0, 1);
11578 which = 1;
11581 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11582 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11583 & cst1) != cst1)
11584 which = -1;
11586 for (; which >= 0; which--)
11587 switch (TREE_CODE (pmop[which]))
11589 case BIT_AND_EXPR:
11590 case BIT_IOR_EXPR:
11591 case BIT_XOR_EXPR:
11592 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11593 != INTEGER_CST)
11594 break;
11595 /* tree_low_cst not used, because we don't care about
11596 the upper bits. */
11597 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11598 cst0 &= cst1;
11599 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11601 if (cst0 != cst1)
11602 break;
11604 else if (cst0 != 0)
11605 break;
11606 /* If C or D is of the form (A & N) where
11607 (N & M) == M, or of the form (A | N) or
11608 (A ^ N) where (N & M) == 0, replace it with A. */
11609 pmop[which] = TREE_OPERAND (pmop[which], 0);
11610 break;
11611 case INTEGER_CST:
11612 /* If C or D is a N where (N & M) == 0, it can be
11613 omitted (assumed 0). */
11614 if ((TREE_CODE (arg0) == PLUS_EXPR
11615 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11616 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11617 pmop[which] = NULL;
11618 break;
11619 default:
11620 break;
11623 /* Only build anything new if we optimized one or both arguments
11624 above. */
11625 if (pmop[0] != TREE_OPERAND (arg0, 0)
11626 || (TREE_CODE (arg0) != NEGATE_EXPR
11627 && pmop[1] != TREE_OPERAND (arg0, 1)))
11629 tree utype = TREE_TYPE (arg0);
11630 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11632 /* Perform the operations in a type that has defined
11633 overflow behavior. */
11634 utype = unsigned_type_for (TREE_TYPE (arg0));
11635 if (pmop[0] != NULL)
11636 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11637 if (pmop[1] != NULL)
11638 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11641 if (TREE_CODE (arg0) == NEGATE_EXPR)
11642 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11643 else if (TREE_CODE (arg0) == PLUS_EXPR)
11645 if (pmop[0] != NULL && pmop[1] != NULL)
11646 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11647 pmop[0], pmop[1]);
11648 else if (pmop[0] != NULL)
11649 tem = pmop[0];
11650 else if (pmop[1] != NULL)
11651 tem = pmop[1];
11652 else
11653 return build_int_cst (type, 0);
11655 else if (pmop[0] == NULL)
11656 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11657 else
11658 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11659 pmop[0], pmop[1]);
11660 /* TEM is now the new binary +, - or unary - replacement. */
11661 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11662 fold_convert_loc (loc, utype, arg1));
11663 return fold_convert_loc (loc, type, tem);
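/* Worked instance of the masking fold above, assuming M == 0xFF
   (so M + 1 is a power of two) and N == 0x1FF, where (N & M) == M:
     ((A & 0x1FF) + B) & 0xFF  ==>  (A + B) & 0xFF.
   Addition modulo 256 depends only on the low 8 bits of each operand,
   and A & 0x1FF leaves those bits of A untouched. */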
11668 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11669 if (t1 != NULL_TREE)
11670 return t1;
11671 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11672 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11673 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11675 unsigned int prec
11676 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11678 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11679 && (~TREE_INT_CST_LOW (arg1)
11680 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11681 return
11682 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11685 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11687 This results in more efficient code for machines without a NOR
11688 instruction. Combine will canonicalize to the first form
11689 which will allow use of NOR instructions provided by the
11690 backend if they exist. */
11691 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11692 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11694 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11695 build2 (BIT_IOR_EXPR, type,
11696 fold_convert_loc (loc, type,
11697 TREE_OPERAND (arg0, 0)),
11698 fold_convert_loc (loc, type,
11699 TREE_OPERAND (arg1, 0))));
11702 /* If arg0 is derived from the address of an object or function, we may
11703 be able to fold this expression using the object or function's
11704 alignment. */
11705 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11707 unsigned HOST_WIDE_INT modulus, residue;
11708 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11710 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11711 integer_onep (arg1));
11713 /* This works because modulus is a power of 2. If this weren't the
11714 case, we'd have to replace it by its greatest power-of-2
11715 divisor: modulus & -modulus. */
11716 if (low < modulus)
11717 return build_int_cst (type, residue & low);
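/* Illustrative sketch, assuming arg0 is a pointer p to an object with
   known 16-byte alignment (modulus == 16, residue == 0):
     p & 15  ==>  0, since low == 15 < modulus and (residue & 15) == 0. */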
11720 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11721 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11722 if the new mask might be further optimized. */
11723 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11724 || TREE_CODE (arg0) == RSHIFT_EXPR)
11725 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11726 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11727 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11728 < TYPE_PRECISION (TREE_TYPE (arg0))
11729 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11730 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11732 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11733 unsigned HOST_WIDE_INT mask
11734 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11735 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11736 tree shift_type = TREE_TYPE (arg0);
11738 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11739 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11740 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11741 && TYPE_PRECISION (TREE_TYPE (arg0))
11742 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11744 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11745 tree arg00 = TREE_OPERAND (arg0, 0);
11746             /* See if more bits can be proven to be zero because of
11747 zero extension. */
11748 if (TREE_CODE (arg00) == NOP_EXPR
11749 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11751 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11752 if (TYPE_PRECISION (inner_type)
11753 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11754 && TYPE_PRECISION (inner_type) < prec)
11756 prec = TYPE_PRECISION (inner_type);
11757 /* See if we can shorten the right shift. */
11758 if (shiftc < prec)
11759 shift_type = inner_type;
11762 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11763 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11764 zerobits <<= prec - shiftc;
11765             /* For an arithmetic shift, if the sign bit could be set, zerobits
11766                can actually contain sign bits, so no transformation is
11767                possible, unless MASK masks them all away.  In that
11768                case the shift needs to be converted into a logical shift.  */
11769 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11770 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11772 if ((mask & zerobits) == 0)
11773 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11774 else
11775 zerobits = 0;
11779 /* ((X << 16) & 0xff00) is (X, 0). */
11780 if ((mask & zerobits) == mask)
11781 return omit_one_operand_loc (loc, type,
11782 build_int_cst (type, 0), arg0);
11784 newmask = mask | zerobits;
11785 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11787 unsigned int prec;
11789 /* Only do the transformation if NEWMASK is some integer
11790 mode's mask. */
11791 for (prec = BITS_PER_UNIT;
11792 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11793 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11794 break;
11795 if (prec < HOST_BITS_PER_WIDE_INT
11796 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11798 tree newmaskt;
11800 if (shift_type != TREE_TYPE (arg0))
11802 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11803 fold_convert_loc (loc, shift_type,
11804 TREE_OPERAND (arg0, 0)),
11805 TREE_OPERAND (arg0, 1));
11806 tem = fold_convert_loc (loc, type, tem);
11808 else
11809 tem = op0;
11810 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11811 if (!tree_int_cst_equal (newmaskt, arg1))
11812 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11817 goto associate;
11819 case RDIV_EXPR:
11820 /* Don't touch a floating-point divide by zero unless the mode
11821 of the constant can represent infinity. */
11822 if (TREE_CODE (arg1) == REAL_CST
11823 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11824 && real_zerop (arg1))
11825 return NULL_TREE;
11827 /* Optimize A / A to 1.0 if we don't care about
11828 NaNs or Infinities. Skip the transformation
11829 for non-real operands. */
11830 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11831 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11832 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11833 && operand_equal_p (arg0, arg1, 0))
11835 tree r = build_real (TREE_TYPE (arg0), dconst1);
11837 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11840 /* The complex version of the above A / A optimization. */
11841 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11842 && operand_equal_p (arg0, arg1, 0))
11844 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11845 if (! HONOR_NANS (TYPE_MODE (elem_type))
11846 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11848 tree r = build_real (elem_type, dconst1);
11849 /* omit_two_operands will call fold_convert for us. */
11850 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11854 /* (-A) / (-B) -> A / B */
11855 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11856 return fold_build2_loc (loc, RDIV_EXPR, type,
11857 TREE_OPERAND (arg0, 0),
11858 negate_expr (arg1));
11859 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11860 return fold_build2_loc (loc, RDIV_EXPR, type,
11861 negate_expr (arg0),
11862 TREE_OPERAND (arg1, 0));
11864 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11865 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11866 && real_onep (arg1))
11867 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11869 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11870 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11871 && real_minus_onep (arg1))
11872 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11873 negate_expr (arg0)));
11875 /* If ARG1 is a constant, we can convert this to a multiply by the
11876 reciprocal. This does not have the same rounding properties,
11877 so only do this if -freciprocal-math. We can actually
11878 always safely do it if ARG1 is a power of two, but it's hard to
11879 tell if it is or not in a portable manner. */
11880 if (optimize
11881 && (TREE_CODE (arg1) == REAL_CST
11882 || (TREE_CODE (arg1) == COMPLEX_CST
11883 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11884 || (TREE_CODE (arg1) == VECTOR_CST
11885 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11887 if (flag_reciprocal_math
11888 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11889 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11890 /* Find the reciprocal if optimizing and the result is exact.
11891 TODO: Complex reciprocal not implemented. */
11892 if (TREE_CODE (arg1) != COMPLEX_CST)
11894 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11896 if (inverse)
11897 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
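/* Worked instances of the reciprocal conversions above:
     x / 4.0  ==>  x * 0.25         (exact inverse, done when optimizing)
     x / 3.0  ==>  x * (1.0 / 3.0)  (inexact, only under -freciprocal-math) */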
11900 /* Convert A/B/C to A/(B*C). */
11901 if (flag_reciprocal_math
11902 && TREE_CODE (arg0) == RDIV_EXPR)
11903 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11904 fold_build2_loc (loc, MULT_EXPR, type,
11905 TREE_OPERAND (arg0, 1), arg1));
11907 /* Convert A/(B/C) to (A/B)*C. */
11908 if (flag_reciprocal_math
11909 && TREE_CODE (arg1) == RDIV_EXPR)
11910 return fold_build2_loc (loc, MULT_EXPR, type,
11911 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11912 TREE_OPERAND (arg1, 0)),
11913 TREE_OPERAND (arg1, 1));
11915 /* Convert C1/(X*C2) into (C1/C2)/X. */
11916 if (flag_reciprocal_math
11917 && TREE_CODE (arg1) == MULT_EXPR
11918 && TREE_CODE (arg0) == REAL_CST
11919 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11921 tree tem = const_binop (RDIV_EXPR, arg0,
11922 TREE_OPERAND (arg1, 1));
11923 if (tem)
11924 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11925 TREE_OPERAND (arg1, 0));
11928 if (flag_unsafe_math_optimizations)
11930 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11931 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11933 /* Optimize sin(x)/cos(x) as tan(x). */
11934 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11935 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11936 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11937 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11938 CALL_EXPR_ARG (arg1, 0), 0))
11940 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11942 if (tanfn != NULL_TREE)
11943 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11946 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11947 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11948 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11949 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11950 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11951 CALL_EXPR_ARG (arg1, 0), 0))
11953 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11955 if (tanfn != NULL_TREE)
11957 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11958 CALL_EXPR_ARG (arg0, 0));
11959 return fold_build2_loc (loc, RDIV_EXPR, type,
11960 build_real (type, dconst1), tmp);
11964 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11965 NaNs or Infinities. */
11966 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11967 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11968 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11970 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11971 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11973 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11974 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11975 && operand_equal_p (arg00, arg01, 0))
11977 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11979 if (cosfn != NULL_TREE)
11980 return build_call_expr_loc (loc, cosfn, 1, arg00);
11984 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11985 NaNs or Infinities. */
11986 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11987 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11988 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11990 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11991 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11993 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11994 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11995 && operand_equal_p (arg00, arg01, 0))
11997 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11999 if (cosfn != NULL_TREE)
12001 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12002 return fold_build2_loc (loc, RDIV_EXPR, type,
12003 build_real (type, dconst1),
12004 tmp);
12009 /* Optimize pow(x,c)/x as pow(x,c-1). */
12010 if (fcode0 == BUILT_IN_POW
12011 || fcode0 == BUILT_IN_POWF
12012 || fcode0 == BUILT_IN_POWL)
12014 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12015 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12016 if (TREE_CODE (arg01) == REAL_CST
12017 && !TREE_OVERFLOW (arg01)
12018 && operand_equal_p (arg1, arg00, 0))
12020 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12021 REAL_VALUE_TYPE c;
12022 tree arg;
12024 c = TREE_REAL_CST (arg01);
12025 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12026 arg = build_real (type, c);
12027 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12031 /* Optimize a/root(b/c) into a*root(c/b). */
12032 if (BUILTIN_ROOT_P (fcode1))
12034 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12036 if (TREE_CODE (rootarg) == RDIV_EXPR)
12038 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12039 tree b = TREE_OPERAND (rootarg, 0);
12040 tree c = TREE_OPERAND (rootarg, 1);
12042 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12044 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12045 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12049 /* Optimize x/expN(y) into x*expN(-y). */
12050 if (BUILTIN_EXPONENT_P (fcode1))
12052 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12053 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12054 arg1 = build_call_expr_loc (loc,
12055 expfn, 1,
12056 fold_convert_loc (loc, type, arg));
12057 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12060 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12061 if (fcode1 == BUILT_IN_POW
12062 || fcode1 == BUILT_IN_POWF
12063 || fcode1 == BUILT_IN_POWL)
12065 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12066 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12067 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12068 tree neg11 = fold_convert_loc (loc, type,
12069 negate_expr (arg11));
12070 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12071 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12074 return NULL_TREE;
12076 case TRUNC_DIV_EXPR:
12077 /* Optimize (X & (-A)) / A where A is a power of 2,
12078 to X >> log2(A) */
12079 if (TREE_CODE (arg0) == BIT_AND_EXPR
12080 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12081 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12083 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12084 arg1, TREE_OPERAND (arg0, 1));
12085 if (sum && integer_zerop (sum)) {
12086 unsigned long pow2;
12088 if (TREE_INT_CST_LOW (arg1))
12089 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12090 else
12091 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12092 + HOST_BITS_PER_WIDE_INT;
12094 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12095 TREE_OPERAND (arg0, 0),
12096 build_int_cst (integer_type_node, pow2));
12100 /* Fall through */
12102 case FLOOR_DIV_EXPR:
12103 /* Simplify A / (B << N) where A and B are positive and B is
12104 a power of 2, to A >> (N + log2(B)). */
12105 strict_overflow_p = false;
12106 if (TREE_CODE (arg1) == LSHIFT_EXPR
12107 && (TYPE_UNSIGNED (type)
12108 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12110 tree sval = TREE_OPERAND (arg1, 0);
12111 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12113 tree sh_cnt = TREE_OPERAND (arg1, 1);
12114 unsigned long pow2;
12116 if (TREE_INT_CST_LOW (sval))
12117 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12118 else
12119 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12120 + HOST_BITS_PER_WIDE_INT;
12122 if (strict_overflow_p)
12123 fold_overflow_warning (("assuming signed overflow does not "
12124 "occur when simplifying A / (B << N)"),
12125 WARN_STRICT_OVERFLOW_MISC);
12127 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12128 sh_cnt,
12129 build_int_cst (TREE_TYPE (sh_cnt),
12130 pow2));
12131 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12132 fold_convert_loc (loc, type, arg0), sh_cnt);
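/* Worked instance of the fold above, assuming unsigned x and B == 4
   (log2 (B) == 2):  x / (4U << n)  ==>  x >> (n + 2), valid because
   4U << n is a power of two whenever the shift does not overflow. */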
12136 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12137 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12138 if (INTEGRAL_TYPE_P (type)
12139 && TYPE_UNSIGNED (type)
12140 && code == FLOOR_DIV_EXPR)
12141 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12143 /* Fall through */
12145 case ROUND_DIV_EXPR:
12146 case CEIL_DIV_EXPR:
12147 case EXACT_DIV_EXPR:
12148 if (integer_onep (arg1))
12149 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12150 if (integer_zerop (arg1))
12151 return NULL_TREE;
12152 /* X / -1 is -X. */
12153 if (!TYPE_UNSIGNED (type)
12154 && TREE_CODE (arg1) == INTEGER_CST
12155 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12156 && TREE_INT_CST_HIGH (arg1) == -1)
12157 return fold_convert_loc (loc, type, negate_expr (arg0));
12159 /* Convert -A / -B to A / B when the type is signed and overflow is
12160 undefined. */
12161 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12162 && TREE_CODE (arg0) == NEGATE_EXPR
12163 && negate_expr_p (arg1))
12165 if (INTEGRAL_TYPE_P (type))
12166 fold_overflow_warning (("assuming signed overflow does not occur "
12167 "when distributing negation across "
12168 "division"),
12169 WARN_STRICT_OVERFLOW_MISC);
12170 return fold_build2_loc (loc, code, type,
12171 fold_convert_loc (loc, type,
12172 TREE_OPERAND (arg0, 0)),
12173 fold_convert_loc (loc, type,
12174 negate_expr (arg1)));
12176 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12177 && TREE_CODE (arg1) == NEGATE_EXPR
12178 && negate_expr_p (arg0))
12180 if (INTEGRAL_TYPE_P (type))
12181 fold_overflow_warning (("assuming signed overflow does not occur "
12182 "when distributing negation across "
12183 "division"),
12184 WARN_STRICT_OVERFLOW_MISC);
12185 return fold_build2_loc (loc, code, type,
12186 fold_convert_loc (loc, type,
12187 negate_expr (arg0)),
12188 fold_convert_loc (loc, type,
12189 TREE_OPERAND (arg1, 0)));
12192 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12193 operation, EXACT_DIV_EXPR.
12195 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12196          At one time others generated faster code, but it's not clear whether they
12197          still do after the last round of changes to the DIV code in expmed.c.  */
12198 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12199 && multiple_of_p (type, arg0, arg1))
12200 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12202 strict_overflow_p = false;
12203 if (TREE_CODE (arg1) == INTEGER_CST
12204 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12205 &strict_overflow_p)))
12207 if (strict_overflow_p)
12208 fold_overflow_warning (("assuming signed overflow does not occur "
12209 "when simplifying division"),
12210 WARN_STRICT_OVERFLOW_MISC);
12211 return fold_convert_loc (loc, type, tem);
12214 return NULL_TREE;
12216 case CEIL_MOD_EXPR:
12217 case FLOOR_MOD_EXPR:
12218 case ROUND_MOD_EXPR:
12219 case TRUNC_MOD_EXPR:
12220 /* X % 1 is always zero, but be sure to preserve any side
12221 effects in X. */
12222 if (integer_onep (arg1))
12223 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12225       /* For X % 0, return X % 0 unchanged so that we get the
12226 proper warnings and errors. */
12227 if (integer_zerop (arg1))
12228 return NULL_TREE;
12230 /* 0 % X is always zero, but be sure to preserve any side
12231 effects in X. Place this after checking for X == 0. */
12232 if (integer_zerop (arg0))
12233 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12235 /* X % -1 is zero. */
12236 if (!TYPE_UNSIGNED (type)
12237 && TREE_CODE (arg1) == INTEGER_CST
12238 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12239 && TREE_INT_CST_HIGH (arg1) == -1)
12240 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12242 /* X % -C is the same as X % C. */
12243 if (code == TRUNC_MOD_EXPR
12244 && !TYPE_UNSIGNED (type)
12245 && TREE_CODE (arg1) == INTEGER_CST
12246 && !TREE_OVERFLOW (arg1)
12247 && TREE_INT_CST_HIGH (arg1) < 0
12248 && !TYPE_OVERFLOW_TRAPS (type)
12249 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12250 && !sign_bit_p (arg1, arg1))
12251 return fold_build2_loc (loc, code, type,
12252 fold_convert_loc (loc, type, arg0),
12253 fold_convert_loc (loc, type,
12254 negate_expr (arg1)));
12256 /* X % -Y is the same as X % Y. */
12257 if (code == TRUNC_MOD_EXPR
12258 && !TYPE_UNSIGNED (type)
12259 && TREE_CODE (arg1) == NEGATE_EXPR
12260 && !TYPE_OVERFLOW_TRAPS (type))
12261 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12262 fold_convert_loc (loc, type,
12263 TREE_OPERAND (arg1, 0)));
12265 strict_overflow_p = false;
12266 if (TREE_CODE (arg1) == INTEGER_CST
12267 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12268 &strict_overflow_p)))
12270 if (strict_overflow_p)
12271 fold_overflow_warning (("assuming signed overflow does not occur "
12272 "when simplifying modulus"),
12273 WARN_STRICT_OVERFLOW_MISC);
12274 return fold_convert_loc (loc, type, tem);
12277 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12278 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12279 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12280 && (TYPE_UNSIGNED (type)
12281 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12283 tree c = arg1;
12284 /* Also optimize A % (C << N) where C is a power of 2,
12285 to A & ((C << N) - 1). */
12286 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12287 c = TREE_OPERAND (arg1, 0);
12289 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12291 tree mask
12292 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12293 build_int_cst (TREE_TYPE (arg1), 1));
12294 if (strict_overflow_p)
12295 fold_overflow_warning (("assuming signed overflow does not "
12296 "occur when simplifying "
12297 "X % (power of two)"),
12298 WARN_STRICT_OVERFLOW_MISC);
12299 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12300 fold_convert_loc (loc, type, arg0),
12301 fold_convert_loc (loc, type, mask));
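      /* Illustrative example (hypothetical function; applies because the
	 operand is unsigned and the divisor is a power of two):

	     unsigned f (unsigned x) { return x % 8; }

	 folds to the equivalent of

	     unsigned f (unsigned x) { return x & 7; }  */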
      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
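      /* Illustrative example (hypothetical function): consecutive shifts
	 by constants in the same direction combine, so

	     unsigned f (unsigned x) { return (x << 3) << 4; }

	 folds to the equivalent of

	     unsigned f (unsigned x) { return x << 7; }

	 while a combined count reaching the type's precision folds to 0
	 for unsigned or left shifts, as handled just above.  */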
      /* Transform (x >> c) << c into x & (-1 << c), or transform
	 (x << c) >> c into x & ((unsigned) -1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1), and
	 (X & C2) >> C1 into (X >> C1) & (C2 >> C1),
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}
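      /* Illustrative example (hypothetical function with truth-valued
	 operands): the pattern above folds

	     int f (int a, int b) { return (a && !b) || (!a && b); }

	 into the equivalent of a single logical XOR of the two
	 conditions.  */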
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
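      /* Illustrative examples (hypothetical boolean variable b): with
	 the folds above,

	     b != 0  and  b == 1  become  b
	     b != 1  and  b == 0  become  !b

	 so redundant comparisons against boolean constants vanish.  */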
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
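      /* Illustrative example (hypothetical statics): given

	     static int a, b;
	     int f (void) { return &a == &b; }

	 the two decls are distinct, non-weak and unaliased, so the
	 comparison folds to 0 (and &a == &a would fold to 1).  */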
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
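      /* Illustrative example (hypothetical function): the constants
	 migrate to one side, so

	     int f (int x) { return (x ^ 4) == 6; }

	 folds to the equivalent of

	     int f (int x) { return x == 2; }

	 since 4 ^ 6 == 2.  */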
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then
		 ((X >> C1) & C2) != 0 can be rewritten as
		 (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type, arg000,
					build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
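      /* Illustrative example (hypothetical function): with the fold
	 above,

	     int f (const char *p) { return strlen (p) == 0; }

	 becomes the equivalent of

	     int f (const char *p) { return *p == 0; }

	 avoiding the library call entirely.  */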
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
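      /* Illustrative example (hypothetical function, 32-bit int): a sign
	 bit extracted by a full-width shift becomes a direct sign test, so

	     int f (int x) { return (x >> 31) != 0; }

	 folds to the equivalent of

	     int f (int x) { return x < 0; }  */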
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11),
						     arg00),
				    build_zero_cst (itype));
	}
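      /* Illustrative example (hypothetical function): masked equality
	 becomes a masked XOR against zero, so

	     int f (int x, int y) { return (x & 15) == (y & 15); }

	 folds to the equivalent of

	     int f (int x, int y) { return ((x ^ y) & 15) == 0; }  */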
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
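      /* Illustrative example (hypothetical function, unsigned x): the
	 fold above rewrites a range test against a power of two as a
	 shift test, so

	     int f (unsigned x, int y) { return x < (1u << y); }

	 becomes the equivalent of

	     int f (unsigned x, int y) { return (x >> y) == 0; }  */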
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }
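      /* Illustrative example (hypothetical function): when both operands
	 were widened from float, the comparison happens in float, so

	     int f (float a, float b) { return (double) a < (double) b; }

	 folds to the equivalent of

	     int f (float a, float b) { return a < b; }  */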
      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
13821 /* Fold a ternary expression of code CODE and type TYPE with operands
13822 OP0, OP1, and OP2. Return the folded expression if folding is
13823 successful. Otherwise, return NULL_TREE. */
13825 tree
13826 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13827 tree op0, tree op1, tree op2)
13829 tree tem;
13830 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13831 enum tree_code_class kind = TREE_CODE_CLASS (code);
13833 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13834 && TREE_CODE_LENGTH (code) == 3);
13836 /* Strip any conversions that don't change the mode. This is safe
13837 for every expression, except for a comparison expression because
13838 its signedness is derived from its operands. So, in the latter
13839 case, only strip conversions that don't change the signedness.
13841 Note that this is done as an internal manipulation within the
13842 constant folder, in order to find the simplest representation of
13843 the arguments so that their form can be studied. In any case,
13844 the appropriate type conversions should be put back in the tree
13845 that will get out of the constant folder. */
13846 if (op0)
13848 arg0 = op0;
13849 STRIP_NOPS (arg0);
13852 if (op1)
13854 arg1 = op1;
13855 STRIP_NOPS (arg1);
13858 if (op2)
13860 arg2 = op2;
13861 STRIP_NOPS (arg2);
13864 switch (code)
13866 case COMPONENT_REF:
13867 if (TREE_CODE (arg0) == CONSTRUCTOR
13868 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13870 unsigned HOST_WIDE_INT idx;
13871 tree field, value;
13872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13873 if (field == arg1)
13874 return value;
13876 return NULL_TREE;
13878 case COND_EXPR:
13879 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13880 so all simple results must be passed through pedantic_non_lvalue. */
13881 if (TREE_CODE (arg0) == INTEGER_CST)
13883 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13884 tem = integer_zerop (arg0) ? op2 : op1;
13885 /* Only optimize constant conditions when the selected branch
13886 has the same type as the COND_EXPR. This avoids optimizing
13887 away "c ? x : throw", where the throw has a void type.
13888 Avoid throwing away an operand which contains a label. */
13889 if ((!TREE_SIDE_EFFECTS (unused_op)
13890 || !contains_label_p (unused_op))
13891 && (! VOID_TYPE_P (TREE_TYPE (tem))
13892 || VOID_TYPE_P (type)))
13893 return pedantic_non_lvalue_loc (loc, tem);
13894 return NULL_TREE;
13896 if (operand_equal_p (arg1, op2, 0))
13897 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13899 /* If we have A op B ? A : C, we may be able to convert this to a
13900 simpler expression, depending on the operation and the values
13901 of B and C. Signed zeros prevent all of these transformations,
13902 for reasons given above each one.
13904 Also try swapping the arguments and inverting the conditional. */
13905 if (COMPARISON_CLASS_P (arg0)
13906 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13907 arg1, TREE_OPERAND (arg0, 1))
13908 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13910 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13911 if (tem)
13912 return tem;
13915 if (COMPARISON_CLASS_P (arg0)
13916 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13917 op2,
13918 TREE_OPERAND (arg0, 1))
13919 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13921 location_t loc0 = expr_location_or (arg0, loc);
13922 tem = fold_truth_not_expr (loc0, arg0);
13923 if (tem && COMPARISON_CLASS_P (tem))
13925 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13926 if (tem)
13927 return tem;
13931 /* If the second operand is simpler than the third, swap them
13932 since that produces better jump optimization results. */
13933 if (truth_value_p (TREE_CODE (arg0))
13934 && tree_swap_operands_p (op1, op2, false))
13936 location_t loc0 = expr_location_or (arg0, loc);
13937 /* See if this can be inverted. If it can't, possibly because
13938 it was a floating-point inequality comparison, don't do
13939 anything. */
13940 tem = fold_truth_not_expr (loc0, arg0);
13941 if (tem)
13942 return fold_build3_loc (loc, code, type, tem, op2, op1);
13945 /* Convert A ? 1 : 0 to simply A. */
13946 if (integer_onep (op1)
13947 && integer_zerop (op2)
13948 /* If we try to convert OP0 to our type, the
13949 call to fold will try to move the conversion inside
13950 a COND, which will recurse. In that case, the COND_EXPR
13951 is probably the best choice, so leave it alone. */
13952 && type == TREE_TYPE (arg0))
13953 return pedantic_non_lvalue_loc (loc, arg0);
13955 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13956 over COND_EXPR in cases such as floating point comparisons. */
13957 if (integer_zerop (op1)
13958 && integer_onep (op2)
13959 && truth_value_p (TREE_CODE (arg0)))
13960 return pedantic_non_lvalue_loc (loc,
13961 fold_convert_loc (loc, type,
13962 invert_truthvalue_loc (loc,
13963 arg0)));
13965 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13966 if (TREE_CODE (arg0) == LT_EXPR
13967 && integer_zerop (TREE_OPERAND (arg0, 1))
13968 && integer_zerop (op2)
13969 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13971 /* sign_bit_p only checks ARG1 bits within A's precision.
13972 If <sign bit of A> has wider type than A, bits outside
13973 of A's precision in <sign bit of A> need to be checked.
13974 If they are all 0, this optimization needs to be done
13975 in A's unsigned type; if they are all 1, in A's signed type;
13976 otherwise this can't be done. */
13977 if (TYPE_PRECISION (TREE_TYPE (tem))
13978 < TYPE_PRECISION (TREE_TYPE (arg1))
13979 && TYPE_PRECISION (TREE_TYPE (tem))
13980 < TYPE_PRECISION (type))
13982 unsigned HOST_WIDE_INT mask_lo;
13983 HOST_WIDE_INT mask_hi;
13984 int inner_width, outer_width;
13985 tree tem_type;
13987 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13988 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13989 if (outer_width > TYPE_PRECISION (type))
13990 outer_width = TYPE_PRECISION (type);
13992 if (outer_width > HOST_BITS_PER_WIDE_INT)
13994 mask_hi = ((unsigned HOST_WIDE_INT) -1
13995 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13996 mask_lo = -1;
13998 else
14000 mask_hi = 0;
14001 mask_lo = ((unsigned HOST_WIDE_INT) -1
14002 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14004 if (inner_width > HOST_BITS_PER_WIDE_INT)
14006 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14007 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14008 mask_lo = 0;
14010 else
14011 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14012 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14014 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14015 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14017 tem_type = signed_type_for (TREE_TYPE (tem));
14018 tem = fold_convert_loc (loc, tem_type, tem);
14020 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14021 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14023 tem_type = unsigned_type_for (TREE_TYPE (tem));
14024 tem = fold_convert_loc (loc, tem_type, tem);
14026 else
14027 tem = NULL;
14030 if (tem)
14031 return
14032 fold_convert_loc (loc, type,
14033 fold_build2_loc (loc, BIT_AND_EXPR,
14034 TREE_TYPE (tem), tem,
14035 fold_convert_loc (loc,
14036 TREE_TYPE (tem),
14037 arg1)));
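/* A concrete instance of the transformation above, for illustration
   only: with 32-bit int A,

     A < 0 ? 0x80000000 : 0   ==>   A & 0x80000000

   The mask checks guard the case where <sign bit of A> is wider than
   A: the extension bits must be either all ones (fold in the signed
   type) or all zeros (fold in the unsigned type).  */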
14040 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14041 already handled above. */
14042 if (TREE_CODE (arg0) == BIT_AND_EXPR
14043 && integer_onep (TREE_OPERAND (arg0, 1))
14044 && integer_zerop (op2)
14045 && integer_pow2p (arg1))
14047 tree tem = TREE_OPERAND (arg0, 0);
14048 STRIP_NOPS (tem);
14049 if (TREE_CODE (tem) == RSHIFT_EXPR
14050 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14051 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14052 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14053 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14054 TREE_OPERAND (tem, 0), arg1);
14057 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14058 is probably obsolete because the first operand should be a
14059 truth value (that's why we have the two cases above), but let's
14060 leave it in until we can confirm this for all front-ends. */
14061 if (integer_zerop (op2)
14062 && TREE_CODE (arg0) == NE_EXPR
14063 && integer_zerop (TREE_OPERAND (arg0, 1))
14064 && integer_pow2p (arg1)
14065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14066 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14067 arg1, OEP_ONLY_CONST))
14068 return pedantic_non_lvalue_loc (loc,
14069 fold_convert_loc (loc, type,
14070 TREE_OPERAND (arg0, 0)));
14072 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14073 if (integer_zerop (op2)
14074 && truth_value_p (TREE_CODE (arg0))
14075 && truth_value_p (TREE_CODE (arg1)))
14076 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14077 fold_convert_loc (loc, type, arg0),
14078 arg1);
14080 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14081 if (integer_onep (op2)
14082 && truth_value_p (TREE_CODE (arg0))
14083 && truth_value_p (TREE_CODE (arg1)))
14085 location_t loc0 = expr_location_or (arg0, loc);
14086 /* Only perform transformation if ARG0 is easily inverted. */
14087 tem = fold_truth_not_expr (loc0, arg0);
14088 if (tem)
14089 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14090 fold_convert_loc (loc, type, tem),
14091 arg1);
14094 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14095 if (integer_zerop (arg1)
14096 && truth_value_p (TREE_CODE (arg0))
14097 && truth_value_p (TREE_CODE (op2)))
14099 location_t loc0 = expr_location_or (arg0, loc);
14100 /* Only perform transformation if ARG0 is easily inverted. */
14101 tem = fold_truth_not_expr (loc0, arg0);
14102 if (tem)
14103 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14104 fold_convert_loc (loc, type, tem),
14105 op2);
14108 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14109 if (integer_onep (arg1)
14110 && truth_value_p (TREE_CODE (arg0))
14111 && truth_value_p (TREE_CODE (op2)))
14112 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14113 fold_convert_loc (loc, type, arg0),
14114 op2);
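/* Taken together, the four truth-value conversions above rewrite
   (illustration only):

     a ? b : 0  ==>  a && b        a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b       a ? 1 : b  ==>  a || b

   where a and b are truth values and, for the forms needing !a, the
   inversion must be cheap (fold_truth_not_expr succeeds).  */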
14116 return NULL_TREE;
14118 case VEC_COND_EXPR:
14119 if (TREE_CODE (arg0) == VECTOR_CST)
14121 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14122 return pedantic_non_lvalue_loc (loc, op1);
14123 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14124 return pedantic_non_lvalue_loc (loc, op2);
14126 return NULL_TREE;
14128 case CALL_EXPR:
14129 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14130 of fold_ternary on them. */
14131 gcc_unreachable ();
14133 case BIT_FIELD_REF:
14134 if ((TREE_CODE (arg0) == VECTOR_CST
14135 || (TREE_CODE (arg0) == CONSTRUCTOR
14136 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14137 && (type == TREE_TYPE (TREE_TYPE (arg0))
14138 || (TREE_CODE (type) == VECTOR_TYPE
14139 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14141 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14142 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14143 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14144 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14146 if (n != 0
14147 && (idx % width) == 0
14148 && (n % width) == 0
14149 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14151 idx = idx / width;
14152 n = n / width;
14154 if (TREE_CODE (arg0) == VECTOR_CST)
14156 if (n == 1)
14157 return VECTOR_CST_ELT (arg0, idx);
14159 tree *vals = XALLOCAVEC (tree, n);
14160 for (unsigned i = 0; i < n; ++i)
14161 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14162 return build_vector (type, vals);
14165 /* Constructor elements can be subvectors. */
14166 unsigned HOST_WIDE_INT k = 1;
14167 if (CONSTRUCTOR_NELTS (arg0) != 0)
14169 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14170 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14171 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14174 /* We keep an exact subset of the constructor elements. */
14175 if ((idx % k) == 0 && (n % k) == 0)
14177 if (CONSTRUCTOR_NELTS (arg0) == 0)
14178 return build_constructor (type, NULL);
14179 idx /= k;
14180 n /= k;
14181 if (n == 1)
14183 if (idx < CONSTRUCTOR_NELTS (arg0))
14184 return CONSTRUCTOR_ELT (arg0, idx)->value;
14185 return build_zero_cst (type);
14188 vec<constructor_elt, va_gc> *vals;
14189 vec_alloc (vals, n);
14190 for (unsigned i = 0;
14191 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14192 ++i)
14193 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14194 CONSTRUCTOR_ELT
14195 (arg0, idx + i)->value);
14196 return build_constructor (type, vals);
14198 /* The bitfield references a single constructor element. */
14199 else if (idx + n <= (idx / k + 1) * k)
14201 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14202 return build_zero_cst (type);
14203 else if (n == k)
14204 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14205 else
14206 return fold_build3_loc (loc, code, type,
14207 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14208 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14213 /* A bit-field-ref that referenced the full argument can be stripped. */
14214 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14215 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14216 && integer_zerop (op2))
14217 return fold_convert_loc (loc, type, arg0);
14219 /* On constants we can use native encode/interpret to constant
14220 fold (nearly) all BIT_FIELD_REFs. */
14221 if (CONSTANT_CLASS_P (arg0)
14222 && can_native_interpret_type_p (type)
14223 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14224 /* This limitation should not be necessary; we just need to
14225 round this up to the mode size. */
14226 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14227 /* Need bit-shifting of the buffer to relax the following. */
14228 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14230 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14231 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14232 unsigned HOST_WIDE_INT clen;
14233 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14234 /* ??? We cannot tell native_encode_expr to start at
14235 some random byte only. So limit us to a reasonable amount
14236 of work. */
14237 if (clen <= 4096)
14239 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14240 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14241 if (len > 0
14242 && len * BITS_PER_UNIT >= bitpos + bitsize)
14244 tree v = native_interpret_expr (type,
14245 b + bitpos / BITS_PER_UNIT,
14246 bitsize / BITS_PER_UNIT);
14247 if (v)
14248 return v;
14253 return NULL_TREE;
14255 case FMA_EXPR:
14256 /* For integers we can decompose the FMA if possible. */
14257 if (TREE_CODE (arg0) == INTEGER_CST
14258 && TREE_CODE (arg1) == INTEGER_CST)
14259 return fold_build2_loc (loc, PLUS_EXPR, type,
14260 const_binop (MULT_EXPR, arg0, arg1), arg2);
14261 if (integer_zerop (arg2))
14262 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14264 return fold_fma (loc, type, arg0, arg1, arg2);
14266 case VEC_PERM_EXPR:
14267 if (TREE_CODE (arg2) == VECTOR_CST)
14269 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14270 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14271 tree t;
14272 bool need_mask_canon = false;
14273 bool all_in_vec0 = true;
14274 bool all_in_vec1 = true;
14275 bool maybe_identity = true;
14276 bool single_arg = (op0 == op1);
14277 bool changed = false;
14279 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14280 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14281 for (i = 0; i < nelts; i++)
14283 tree val = VECTOR_CST_ELT (arg2, i);
14284 if (TREE_CODE (val) != INTEGER_CST)
14285 return NULL_TREE;
14287 sel[i] = TREE_INT_CST_LOW (val) & mask;
14288 if (TREE_INT_CST_HIGH (val)
14289 || ((unsigned HOST_WIDE_INT)
14290 TREE_INT_CST_LOW (val) != sel[i]))
14291 need_mask_canon = true;
14293 if (sel[i] < nelts)
14294 all_in_vec1 = false;
14295 else
14296 all_in_vec0 = false;
14298 if ((sel[i] & (nelts-1)) != i)
14299 maybe_identity = false;
14302 if (maybe_identity)
14304 if (all_in_vec0)
14305 return op0;
14306 if (all_in_vec1)
14307 return op1;
14310 if (all_in_vec0)
14311 op1 = op0;
14312 else if (all_in_vec1)
14314 op0 = op1;
14315 for (i = 0; i < nelts; i++)
14316 sel[i] -= nelts;
14317 need_mask_canon = true;
14320 if ((TREE_CODE (op0) == VECTOR_CST
14321 || TREE_CODE (op0) == CONSTRUCTOR)
14322 && (TREE_CODE (op1) == VECTOR_CST
14323 || TREE_CODE (op1) == CONSTRUCTOR))
14325 t = fold_vec_perm (type, op0, op1, sel);
14326 if (t != NULL_TREE)
14327 return t;
14330 if (op0 == op1 && !single_arg)
14331 changed = true;
14333 if (need_mask_canon && arg2 == op2)
14335 tree *tsel = XALLOCAVEC (tree, nelts);
14336 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14337 for (i = 0; i < nelts; i++)
14338 tsel[i] = build_int_cst (eltype, sel[i]);
14339 op2 = build_vector (TREE_TYPE (arg2), tsel);
14340 changed = true;
14343 if (changed)
14344 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14346 return NULL_TREE;
14348 default:
14349 return NULL_TREE;
14350 } /* switch (code) */
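/* Sketch of a caller, with hypothetical trees A and B, to show the
   contract:

     tree t = fold_ternary_loc (loc, COND_EXPR, integer_type_node,
                                integer_one_node, a, b);

   yields A via the constant-condition path in the COND_EXPR case
   above (subject to its type checks); a NULL_TREE result means no
   simplification applied and the caller must build the tree itself.  */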
14353 /* Perform constant folding and related simplification of EXPR.
14354 The related simplifications include x*1 => x, x*0 => 0, etc.,
14355 and application of the associative law.
14356 NOP_EXPR conversions may be removed freely (as long as we
14357 are careful not to change the type of the overall expression).
14358 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14359 but we can constant-fold them if they have constant operands. */
14361 #ifdef ENABLE_FOLD_CHECKING
14362 # define fold(x) fold_1 (x)
14363 static tree fold_1 (tree);
14364 static
14365 #endif
14366 tree
14367 fold (tree expr)
14369 const tree t = expr;
14370 enum tree_code code = TREE_CODE (t);
14371 enum tree_code_class kind = TREE_CODE_CLASS (code);
14372 tree tem;
14373 location_t loc = EXPR_LOCATION (expr);
14375 /* Return right away if a constant. */
14376 if (kind == tcc_constant)
14377 return t;
14379 /* CALL_EXPR-like objects with variable numbers of operands are
14380 treated specially. */
14381 if (kind == tcc_vl_exp)
14383 if (code == CALL_EXPR)
14385 tem = fold_call_expr (loc, expr, false);
14386 return tem ? tem : expr;
14388 return expr;
14391 if (IS_EXPR_CODE_CLASS (kind))
14393 tree type = TREE_TYPE (t);
14394 tree op0, op1, op2;
14396 switch (TREE_CODE_LENGTH (code))
14398 case 1:
14399 op0 = TREE_OPERAND (t, 0);
14400 tem = fold_unary_loc (loc, code, type, op0);
14401 return tem ? tem : expr;
14402 case 2:
14403 op0 = TREE_OPERAND (t, 0);
14404 op1 = TREE_OPERAND (t, 1);
14405 tem = fold_binary_loc (loc, code, type, op0, op1);
14406 return tem ? tem : expr;
14407 case 3:
14408 op0 = TREE_OPERAND (t, 0);
14409 op1 = TREE_OPERAND (t, 1);
14410 op2 = TREE_OPERAND (t, 2);
14411 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14412 return tem ? tem : expr;
14413 default:
14414 break;
14418 switch (code)
14420 case ARRAY_REF:
14422 tree op0 = TREE_OPERAND (t, 0);
14423 tree op1 = TREE_OPERAND (t, 1);
14425 if (TREE_CODE (op1) == INTEGER_CST
14426 && TREE_CODE (op0) == CONSTRUCTOR
14427 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14429 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14430 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14431 unsigned HOST_WIDE_INT begin = 0;
14433 /* Find a matching index by means of a binary search. */
14434 while (begin != end)
14436 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14437 tree index = (*elts)[middle].index;
14439 if (TREE_CODE (index) == INTEGER_CST
14440 && tree_int_cst_lt (index, op1))
14441 begin = middle + 1;
14442 else if (TREE_CODE (index) == INTEGER_CST
14443 && tree_int_cst_lt (op1, index))
14444 end = middle;
14445 else if (TREE_CODE (index) == RANGE_EXPR
14446 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14447 begin = middle + 1;
14448 else if (TREE_CODE (index) == RANGE_EXPR
14449 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14450 end = middle;
14451 else
14452 return (*elts)[middle].value;
14456 return t;
14459 /* Return a VECTOR_CST if possible. */
14460 case CONSTRUCTOR:
14462 tree type = TREE_TYPE (t);
14463 if (TREE_CODE (type) != VECTOR_TYPE)
14464 return t;
14466 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14467 unsigned HOST_WIDE_INT idx, pos = 0;
14468 tree value;
14470 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14472 if (!CONSTANT_CLASS_P (value))
14473 return t;
14474 if (TREE_CODE (value) == VECTOR_CST)
14476 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14477 vec[pos++] = VECTOR_CST_ELT (value, i);
14479 else
14480 vec[pos++] = value;
14482 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14483 vec[pos] = build_zero_cst (TREE_TYPE (type));
14485 return build_vector (type, vec);
14488 case CONST_DECL:
14489 return fold (DECL_INITIAL (t));
14491 default:
14492 return t;
14493 } /* switch (code) */
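/* Illustration, not from the original sources: given the CONSTRUCTOR
   for "static const int a[4] = { 10, 20, 30, 40 };", folding the
   ARRAY_REF a[2] binary-searches the sorted constructor indices and
   returns the INTEGER_CST 30; a RANGE_EXPR index such as [1 ... 3]
   matches any subscript within the range.  */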
14496 #ifdef ENABLE_FOLD_CHECKING
14497 #undef fold
14499 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14500 hash_table <pointer_hash <tree_node> >);
14501 static void fold_check_failed (const_tree, const_tree);
14502 void print_fold_checksum (const_tree);
14504 /* When --enable-checking=fold, compute a digest of expr before
14505 and after the actual fold call to verify that fold did not
14506 accidentally change the original expr. */
14508 tree
14509 fold (tree expr)
14511 tree ret;
14512 struct md5_ctx ctx;
14513 unsigned char checksum_before[16], checksum_after[16];
14514 hash_table <pointer_hash <tree_node> > ht;
14516 ht.create (32);
14517 md5_init_ctx (&ctx);
14518 fold_checksum_tree (expr, &ctx, ht);
14519 md5_finish_ctx (&ctx, checksum_before);
14520 ht.empty ();
14522 ret = fold_1 (expr);
14524 md5_init_ctx (&ctx);
14525 fold_checksum_tree (expr, &ctx, ht);
14526 md5_finish_ctx (&ctx, checksum_after);
14527 ht.dispose ();
14529 if (memcmp (checksum_before, checksum_after, 16))
14530 fold_check_failed (expr, ret);
14532 return ret;
14535 void
14536 print_fold_checksum (const_tree expr)
14538 struct md5_ctx ctx;
14539 unsigned char checksum[16], cnt;
14540 hash_table <pointer_hash <tree_node> > ht;
14542 ht.create (32);
14543 md5_init_ctx (&ctx);
14544 fold_checksum_tree (expr, &ctx, ht);
14545 md5_finish_ctx (&ctx, checksum);
14546 ht.dispose ();
14547 for (cnt = 0; cnt < 16; ++cnt)
14548 fprintf (stderr, "%02x", checksum[cnt]);
14549 putc ('\n', stderr);
14552 static void
14553 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14555 internal_error ("fold check: original tree changed by fold");
14558 static void
14559 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14560 hash_table <pointer_hash <tree_node> > ht)
14562 tree_node **slot;
14563 enum tree_code code;
14564 union tree_node buf;
14565 int i, len;
14567 recursive_label:
14568 if (expr == NULL)
14569 return;
14570 slot = ht.find_slot (expr, INSERT);
14571 if (*slot != NULL)
14572 return;
14573 *slot = CONST_CAST_TREE (expr);
14574 code = TREE_CODE (expr);
14575 if (TREE_CODE_CLASS (code) == tcc_declaration
14576 && DECL_ASSEMBLER_NAME_SET_P (expr))
14578 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14579 memcpy ((char *) &buf, expr, tree_size (expr));
14580 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14581 expr = (tree) &buf;
14583 else if (TREE_CODE_CLASS (code) == tcc_type
14584 && (TYPE_POINTER_TO (expr)
14585 || TYPE_REFERENCE_TO (expr)
14586 || TYPE_CACHED_VALUES_P (expr)
14587 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14588 || TYPE_NEXT_VARIANT (expr)))
14590 /* Allow these fields to be modified. */
14591 tree tmp;
14592 memcpy ((char *) &buf, expr, tree_size (expr));
14593 expr = tmp = (tree) &buf;
14594 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14595 TYPE_POINTER_TO (tmp) = NULL;
14596 TYPE_REFERENCE_TO (tmp) = NULL;
14597 TYPE_NEXT_VARIANT (tmp) = NULL;
14598 if (TYPE_CACHED_VALUES_P (tmp))
14600 TYPE_CACHED_VALUES_P (tmp) = 0;
14601 TYPE_CACHED_VALUES (tmp) = NULL;
14604 md5_process_bytes (expr, tree_size (expr), ctx);
14605 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14606 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14607 if (TREE_CODE_CLASS (code) != tcc_type
14608 && TREE_CODE_CLASS (code) != tcc_declaration
14609 && code != TREE_LIST
14610 && code != SSA_NAME
14611 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14612 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14613 switch (TREE_CODE_CLASS (code))
14615 case tcc_constant:
14616 switch (code)
14618 case STRING_CST:
14619 md5_process_bytes (TREE_STRING_POINTER (expr),
14620 TREE_STRING_LENGTH (expr), ctx);
14621 break;
14622 case COMPLEX_CST:
14623 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14624 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14625 break;
14626 case VECTOR_CST:
14627 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14628 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14629 break;
14630 default:
14631 break;
14633 break;
14634 case tcc_exceptional:
14635 switch (code)
14637 case TREE_LIST:
14638 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14639 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14640 expr = TREE_CHAIN (expr);
14641 goto recursive_label;
14642 break;
14643 case TREE_VEC:
14644 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14645 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14646 break;
14647 default:
14648 break;
14650 break;
14651 case tcc_expression:
14652 case tcc_reference:
14653 case tcc_comparison:
14654 case tcc_unary:
14655 case tcc_binary:
14656 case tcc_statement:
14657 case tcc_vl_exp:
14658 len = TREE_OPERAND_LENGTH (expr);
14659 for (i = 0; i < len; ++i)
14660 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14661 break;
14662 case tcc_declaration:
14663 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14664 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14665 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14667 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14668 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14669 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14670 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14671 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14673 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14674 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14676 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14678 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14679 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14680 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14682 break;
14683 case tcc_type:
14684 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14685 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14686 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14687 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14688 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14689 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14690 if (INTEGRAL_TYPE_P (expr)
14691 || SCALAR_FLOAT_TYPE_P (expr))
14693 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14694 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14696 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14697 if (TREE_CODE (expr) == RECORD_TYPE
14698 || TREE_CODE (expr) == UNION_TYPE
14699 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14700 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14701 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14702 break;
14703 default:
14704 break;
14708 /* Helper function for outputting the checksum of a tree T. When
14709 debugging with gdb, you can "define mynext" to be "next" followed
14710 by "call debug_fold_checksum (op0)", then just trace down till the
14711 outputs differ. */
14713 DEBUG_FUNCTION void
14714 debug_fold_checksum (const_tree t)
14716 int i;
14717 unsigned char checksum[16];
14718 struct md5_ctx ctx;
14719 hash_table <pointer_hash <tree_node> > ht;
14720 ht.create (32);
14722 md5_init_ctx (&ctx);
14723 fold_checksum_tree (t, &ctx, ht);
14724 md5_finish_ctx (&ctx, checksum);
14725 ht.empty ();
14727 for (i = 0; i < 16; i++)
14728 fprintf (stderr, "%d ", checksum[i]);
14730 fprintf (stderr, "\n");
14733 #endif
14735 /* Fold a unary tree expression with code CODE of type TYPE with an
14736 operand OP0. LOC is the location of the resulting expression.
14737 Return a folded expression if successful. Otherwise, return a tree
14738 expression with code CODE of type TYPE with an operand OP0. */
14740 tree
14741 fold_build1_stat_loc (location_t loc,
14742 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14744 tree tem;
14745 #ifdef ENABLE_FOLD_CHECKING
14746 unsigned char checksum_before[16], checksum_after[16];
14747 struct md5_ctx ctx;
14748 hash_table <pointer_hash <tree_node> > ht;
14750 ht.create (32);
14751 md5_init_ctx (&ctx);
14752 fold_checksum_tree (op0, &ctx, ht);
14753 md5_finish_ctx (&ctx, checksum_before);
14754 ht.empty ();
14755 #endif
14757 tem = fold_unary_loc (loc, code, type, op0);
14758 if (!tem)
14759 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14761 #ifdef ENABLE_FOLD_CHECKING
14762 md5_init_ctx (&ctx);
14763 fold_checksum_tree (op0, &ctx, ht);
14764 md5_finish_ctx (&ctx, checksum_after);
14765 ht.dispose ();
14767 if (memcmp (checksum_before, checksum_after, 16))
14768 fold_check_failed (op0, tem);
14769 #endif
14770 return tem;
14773 /* Fold a binary tree expression with code CODE of type TYPE with
14774 operands OP0 and OP1. LOC is the location of the resulting
14775 expression. Return a folded expression if successful. Otherwise,
14776 return a tree expression with code CODE of type TYPE with operands
14777 OP0 and OP1. */
14779 tree
14780 fold_build2_stat_loc (location_t loc,
14781 enum tree_code code, tree type, tree op0, tree op1
14782 MEM_STAT_DECL)
14784 tree tem;
14785 #ifdef ENABLE_FOLD_CHECKING
14786 unsigned char checksum_before_op0[16],
14787 checksum_before_op1[16],
14788 checksum_after_op0[16],
14789 checksum_after_op1[16];
14790 struct md5_ctx ctx;
14791 hash_table <pointer_hash <tree_node> > ht;
14793 ht.create (32);
14794 md5_init_ctx (&ctx);
14795 fold_checksum_tree (op0, &ctx, ht);
14796 md5_finish_ctx (&ctx, checksum_before_op0);
14797 ht.empty ();
14799 md5_init_ctx (&ctx);
14800 fold_checksum_tree (op1, &ctx, ht);
14801 md5_finish_ctx (&ctx, checksum_before_op1);
14802 ht.empty ();
14803 #endif
14805 tem = fold_binary_loc (loc, code, type, op0, op1);
14806 if (!tem)
14807 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14809 #ifdef ENABLE_FOLD_CHECKING
14810 md5_init_ctx (&ctx);
14811 fold_checksum_tree (op0, &ctx, ht);
14812 md5_finish_ctx (&ctx, checksum_after_op0);
14813 ht.empty ();
14815 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14816 fold_check_failed (op0, tem);
14818 md5_init_ctx (&ctx);
14819 fold_checksum_tree (op1, &ctx, ht);
14820 md5_finish_ctx (&ctx, checksum_after_op1);
14821 ht.dispose ();
14823 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14824 fold_check_failed (op1, tem);
14825 #endif
14826 return tem;
14829 /* Fold a ternary tree expression with code CODE of type TYPE with
14830 operands OP0, OP1, and OP2. Return a folded expression if
14831 successful. Otherwise, return a tree expression with code CODE of
14832 type TYPE with operands OP0, OP1, and OP2. */
14834 tree
14835 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14836 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14838 tree tem;
14839 #ifdef ENABLE_FOLD_CHECKING
14840 unsigned char checksum_before_op0[16],
14841 checksum_before_op1[16],
14842 checksum_before_op2[16],
14843 checksum_after_op0[16],
14844 checksum_after_op1[16],
14845 checksum_after_op2[16];
14846 struct md5_ctx ctx;
14847 hash_table <pointer_hash <tree_node> > ht;
14849 ht.create (32);
14850 md5_init_ctx (&ctx);
14851 fold_checksum_tree (op0, &ctx, ht);
14852 md5_finish_ctx (&ctx, checksum_before_op0);
14853 ht.empty ();
14855 md5_init_ctx (&ctx);
14856 fold_checksum_tree (op1, &ctx, ht);
14857 md5_finish_ctx (&ctx, checksum_before_op1);
14858 ht.empty ();
14860 md5_init_ctx (&ctx);
14861 fold_checksum_tree (op2, &ctx, ht);
14862 md5_finish_ctx (&ctx, checksum_before_op2);
14863 ht.empty ();
14864 #endif
14866 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14867 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14868 if (!tem)
14869 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14871 #ifdef ENABLE_FOLD_CHECKING
14872 md5_init_ctx (&ctx);
14873 fold_checksum_tree (op0, &ctx, ht);
14874 md5_finish_ctx (&ctx, checksum_after_op0);
14875 ht.empty ();
14877 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14878 fold_check_failed (op0, tem);
14880 md5_init_ctx (&ctx);
14881 fold_checksum_tree (op1, &ctx, ht);
14882 md5_finish_ctx (&ctx, checksum_after_op1);
14883 ht.empty ();
14885 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14886 fold_check_failed (op1, tem);
14888 md5_init_ctx (&ctx);
14889 fold_checksum_tree (op2, &ctx, ht);
14890 md5_finish_ctx (&ctx, checksum_after_op2);
14891 ht.dispose ();
14893 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14894 fold_check_failed (op2, tem);
14895 #endif
14896 return tem;
14899 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14900 NARGS arguments in ARGARRAY, and a null static chain.
14901 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14902 of type TYPE from the given operands as constructed by build_call_array. */
14904 tree
14905 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14906 int nargs, tree *argarray)
14908 tree tem;
14909 #ifdef ENABLE_FOLD_CHECKING
14910 unsigned char checksum_before_fn[16],
14911 checksum_before_arglist[16],
14912 checksum_after_fn[16],
14913 checksum_after_arglist[16];
14914 struct md5_ctx ctx;
14915 hash_table <pointer_hash <tree_node> > ht;
14916 int i;
14918 ht.create (32);
14919 md5_init_ctx (&ctx);
14920 fold_checksum_tree (fn, &ctx, ht);
14921 md5_finish_ctx (&ctx, checksum_before_fn);
14922 ht.empty ();
14924 md5_init_ctx (&ctx);
14925 for (i = 0; i < nargs; i++)
14926 fold_checksum_tree (argarray[i], &ctx, ht);
14927 md5_finish_ctx (&ctx, checksum_before_arglist);
14928 ht.empty ();
14929 #endif
14931 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14933 #ifdef ENABLE_FOLD_CHECKING
14934 md5_init_ctx (&ctx);
14935 fold_checksum_tree (fn, &ctx, ht);
14936 md5_finish_ctx (&ctx, checksum_after_fn);
14937 ht.empty ();
14939 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14940 fold_check_failed (fn, tem);
14942 md5_init_ctx (&ctx);
14943 for (i = 0; i < nargs; i++)
14944 fold_checksum_tree (argarray[i], &ctx, ht);
14945 md5_finish_ctx (&ctx, checksum_after_arglist);
14946 ht.dispose ();
14948 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14949 fold_check_failed (NULL_TREE, tem);
14950 #endif
14951 return tem;
14954 /* Perform constant folding and related simplification of initializer
14955 expression EXPR. These behave identically to "fold_buildN" but ignore
14956 potential run-time traps and exceptions that fold must preserve. */
14958 #define START_FOLD_INIT \
14959 int saved_signaling_nans = flag_signaling_nans;\
14960 int saved_trapping_math = flag_trapping_math;\
14961 int saved_rounding_math = flag_rounding_math;\
14962 int saved_trapv = flag_trapv;\
14963 int saved_folding_initializer = folding_initializer;\
14964 flag_signaling_nans = 0;\
14965 flag_trapping_math = 0;\
14966 flag_rounding_math = 0;\
14967 flag_trapv = 0;\
14968 folding_initializer = 1;
14970 #define END_FOLD_INIT \
14971 flag_signaling_nans = saved_signaling_nans;\
14972 flag_trapping_math = saved_trapping_math;\
14973 flag_rounding_math = saved_rounding_math;\
14974 flag_trapv = saved_trapv;\
14975 folding_initializer = saved_folding_initializer;
14977 tree
14978 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14979 tree type, tree op)
14981 tree result;
14982 START_FOLD_INIT;
14984 result = fold_build1_loc (loc, code, type, op);
14986 END_FOLD_INIT;
14987 return result;
14990 tree
14991 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14992 tree type, tree op0, tree op1)
14994 tree result;
14995 START_FOLD_INIT;
14997 result = fold_build2_loc (loc, code, type, op0, op1);
14999 END_FOLD_INIT;
15000 return result;
15003 tree
15004 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15005 tree type, tree op0, tree op1, tree op2)
15007 tree result;
15008 START_FOLD_INIT;
15010 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15012 END_FOLD_INIT;
15013 return result;
15016 tree
15017 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15018 int nargs, tree *argarray)
15020 tree result;
15021 START_FOLD_INIT;
15023 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15025 END_FOLD_INIT;
15026 return result;
15029 #undef START_FOLD_INIT
15030 #undef END_FOLD_INIT
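/* Usage sketch, with hypothetical REAL_CST arguments ONE and THREE:
   in an initializer,

     fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
                                  one, three)

   folds to a REAL_CST even when flag_rounding_math or
   flag_trapping_math would otherwise make fold keep the division,
   because START_FOLD_INIT clears those flags around the call.  */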
15032 /* Determine if the first argument is a multiple of the second argument.
15033 Return 0 if it is not, or if we cannot easily determine it to be.
15035 An example of the sort of thing we care about (at this point; this routine
15036 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15037 fold cases do now) is discovering that
15039 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15041 is a multiple of
15043 SAVE_EXPR (J * 8)
15045 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15047 This code also handles discovering that
15049 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15051 is a multiple of 8 so we don't have to worry about dealing with a
15052 possible remainder.
15054 Note that we *look* inside a SAVE_EXPR only to determine how it was
15055 calculated; it is not safe for fold to do much of anything else with the
15056 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15057 at run time. For example, the latter example above *cannot* be implemented
15058 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15059 evaluation time of the original SAVE_EXPR is not necessarily the same at
15060 the time the new expression is evaluated. The only optimization of this
15061 sort that would be valid is changing
15063 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15065 divided by 8 to
15067 SAVE_EXPR (I) * SAVE_EXPR (J)
15069 (where the same SAVE_EXPR (J) is used in the original and the
15070 transformed version). */
15072 int
15073 multiple_of_p (tree type, const_tree top, const_tree bottom)
15075 if (operand_equal_p (top, bottom, 0))
15076 return 1;
15078 if (TREE_CODE (type) != INTEGER_TYPE)
15079 return 0;
15081 switch (TREE_CODE (top))
15083 case BIT_AND_EXPR:
15084 /* Bitwise and provides a power of two multiple. If the mask is
15085 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15086 if (!integer_pow2p (bottom))
15087 return 0;
15088 /* FALLTHRU */
15090 case MULT_EXPR:
15091 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15092 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15094 case PLUS_EXPR:
15095 case MINUS_EXPR:
15096 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15097 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15099 case LSHIFT_EXPR:
15100 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15102 tree op1, t1;
15104 op1 = TREE_OPERAND (top, 1);
15105 /* const_binop may not detect overflow correctly,
15106 so check for it explicitly here. */
15107 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15108 > TREE_INT_CST_LOW (op1)
15109 && TREE_INT_CST_HIGH (op1) == 0
15110 && 0 != (t1 = fold_convert (type,
15111 const_binop (LSHIFT_EXPR,
15112 size_one_node,
15113 op1)))
15114 && !TREE_OVERFLOW (t1))
15115 return multiple_of_p (type, t1, bottom);
15117 return 0;
15119 case NOP_EXPR:
15120 /* Can't handle conversions from non-integral or wider integral type. */
15121 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15122 || (TYPE_PRECISION (type)
15123 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15124 return 0;
15126 /* ... fall through ... */
15128 case SAVE_EXPR:
15129 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15131 case COND_EXPR:
15132 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15133 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15135 case INTEGER_CST:
15136 if (TREE_CODE (bottom) != INTEGER_CST
15137 || integer_zerop (bottom)
15138 || (TYPE_UNSIGNED (type)
15139 && (tree_int_cst_sgn (top) < 0
15140 || tree_int_cst_sgn (bottom) < 0)))
15141 return 0;
15142 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15143 top, bottom));
15145 default:
15146 return 0;
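/* Worked example, for illustration only: with
   TOP = SAVE_EXPR (I) * SAVE_EXPR (J * 8) and BOTTOM = 8, the
   MULT_EXPR case above succeeds because its second operand recurses
   through the SAVE_EXPR into J * 8, where the INTEGER_CST 8 matches
   BOTTOM; the routine thus returns 1 without knowing I or J.  */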
15150 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15152 static bool
15153 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15155 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15156 && truth_value_p (code))
15157 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15158 have a signed:1 type (where the values are -1 and 0). */
15159 return true;
15160 return false;
15163 /* Return true if (CODE OP0) is known to be non-negative. If the return
15164 value is based on the assumption that signed overflow is undefined,
15165 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15166 *STRICT_OVERFLOW_P. */
15168 bool
15169 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15170 bool *strict_overflow_p)
15172 if (TYPE_UNSIGNED (type))
15173 return true;
15175 switch (code)
15177 case ABS_EXPR:
15178 /* We can't return 1 if flag_wrapv is set because
15179 ABS_EXPR<INT_MIN> = INT_MIN. */
15180 if (!INTEGRAL_TYPE_P (type))
15181 return true;
15182 if (TYPE_OVERFLOW_UNDEFINED (type))
15184 *strict_overflow_p = true;
15185 return true;
15187 break;
15189 case NON_LVALUE_EXPR:
15190 case FLOAT_EXPR:
15191 case FIX_TRUNC_EXPR:
15192 return tree_expr_nonnegative_warnv_p (op0,
15193 strict_overflow_p);
15195 case NOP_EXPR:
15197 tree inner_type = TREE_TYPE (op0);
15198 tree outer_type = type;
15200 if (TREE_CODE (outer_type) == REAL_TYPE)
15202 if (TREE_CODE (inner_type) == REAL_TYPE)
15203 return tree_expr_nonnegative_warnv_p (op0,
15204 strict_overflow_p);
15205 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15207 if (TYPE_UNSIGNED (inner_type))
15208 return true;
15209 return tree_expr_nonnegative_warnv_p (op0,
15210 strict_overflow_p);
15213 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15215 if (TREE_CODE (inner_type) == REAL_TYPE)
15216 return tree_expr_nonnegative_warnv_p (op0,
15217 strict_overflow_p);
15218 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15219 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15220 && TYPE_UNSIGNED (inner_type);
15223 break;
15225 default:
15226 return tree_simple_nonnegative_warnv_p (code, type);
15229 /* We don't know sign of `t', so be conservative and return false. */
15230 return false;
15233 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15234 value is based on the assumption that signed overflow is undefined,
15235 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15236 *STRICT_OVERFLOW_P. */
15238 bool
15239 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15240 tree op1, bool *strict_overflow_p)
15242 if (TYPE_UNSIGNED (type))
15243 return true;
15245 switch (code)
15247 case POINTER_PLUS_EXPR:
15248 case PLUS_EXPR:
15249 if (FLOAT_TYPE_P (type))
15250 return (tree_expr_nonnegative_warnv_p (op0,
15251 strict_overflow_p)
15252 && tree_expr_nonnegative_warnv_p (op1,
15253 strict_overflow_p));
15255 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15256 both unsigned and at least 2 bits shorter than the result. */
15257 if (TREE_CODE (type) == INTEGER_TYPE
15258 && TREE_CODE (op0) == NOP_EXPR
15259 && TREE_CODE (op1) == NOP_EXPR)
15261 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15262 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15263 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15264 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15266 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15267 TYPE_PRECISION (inner2)) + 1;
15268 return prec < TYPE_PRECISION (type);
15271 break;
15273 case MULT_EXPR:
15274 if (FLOAT_TYPE_P (type))
15276 /* x * x for floating point x is always non-negative. */
15277 if (operand_equal_p (op0, op1, 0))
15278 return true;
15279 return (tree_expr_nonnegative_warnv_p (op0,
15280 strict_overflow_p)
15281 && tree_expr_nonnegative_warnv_p (op1,
15282 strict_overflow_p));
15285 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15286 both unsigned and their combined precision is smaller than that of the result. */
15287 if (TREE_CODE (type) == INTEGER_TYPE
15288 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15289 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15291 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15292 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15293 : TREE_TYPE (op0);
15294 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15295 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15296 : TREE_TYPE (op1);
15298 bool unsigned0 = TYPE_UNSIGNED (inner0);
15299 bool unsigned1 = TYPE_UNSIGNED (inner1);
15301 if (TREE_CODE (op0) == INTEGER_CST)
15302 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15304 if (TREE_CODE (op1) == INTEGER_CST)
15305 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15307 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15308 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15310 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15311 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15312 : TYPE_PRECISION (inner0);
15314 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15315 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15316 : TYPE_PRECISION (inner1);
15318 return precision0 + precision1 < TYPE_PRECISION (type);
15321 return false;
15323 case BIT_AND_EXPR:
15324 case MAX_EXPR:
15325 return (tree_expr_nonnegative_warnv_p (op0,
15326 strict_overflow_p)
15327 || tree_expr_nonnegative_warnv_p (op1,
15328 strict_overflow_p));
15330 case BIT_IOR_EXPR:
15331 case BIT_XOR_EXPR:
15332 case MIN_EXPR:
15333 case RDIV_EXPR:
15334 case TRUNC_DIV_EXPR:
15335 case CEIL_DIV_EXPR:
15336 case FLOOR_DIV_EXPR:
15337 case ROUND_DIV_EXPR:
15338 return (tree_expr_nonnegative_warnv_p (op0,
15339 strict_overflow_p)
15340 && tree_expr_nonnegative_warnv_p (op1,
15341 strict_overflow_p));
15343 case TRUNC_MOD_EXPR:
15344 case CEIL_MOD_EXPR:
15345 case FLOOR_MOD_EXPR:
15346 case ROUND_MOD_EXPR:
15347 return tree_expr_nonnegative_warnv_p (op0,
15348 strict_overflow_p);
15349 default:
15350 return tree_simple_nonnegative_warnv_p (code, type);
15353 /* We don't know sign of `t', so be conservative and return false. */
15354 return false;
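/* Concrete instance of the PLUS_EXPR zero-extension rule above, for
   illustration only: in

     (int) (unsigned short) x + (int) (unsigned short) y

   both inner types are 16-bit unsigned, so MAX (16, 16) + 1 = 17
   bits suffice for the sum, 17 < 32, and the result is non-negative
   for all x and y.  */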
15357 /* Return true if T is known to be non-negative. If the return
15358 value is based on the assumption that signed overflow is undefined,
15359 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15360 *STRICT_OVERFLOW_P. */
15362 bool
15363 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15365 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15366 return true;
15368 switch (TREE_CODE (t))
15370 case INTEGER_CST:
15371 return tree_int_cst_sgn (t) >= 0;
15373 case REAL_CST:
15374 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15376 case FIXED_CST:
15377 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15379 case COND_EXPR:
15380 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15381 strict_overflow_p)
15382 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15383 strict_overflow_p));
15384 default:
15385 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15386 TREE_TYPE (t));
15388 /* We don't know sign of `t', so be conservative and return false. */
15389 return false;
15392 /* Return true if T is known to be non-negative. If the return
15393 value is based on the assumption that signed overflow is undefined,
15394 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15395 *STRICT_OVERFLOW_P. */
15397 bool
15398 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15399 tree arg0, tree arg1, bool *strict_overflow_p)
15401 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15402 switch (DECL_FUNCTION_CODE (fndecl))
15404 CASE_FLT_FN (BUILT_IN_ACOS):
15405 CASE_FLT_FN (BUILT_IN_ACOSH):
15406 CASE_FLT_FN (BUILT_IN_CABS):
15407 CASE_FLT_FN (BUILT_IN_COSH):
15408 CASE_FLT_FN (BUILT_IN_ERFC):
15409 CASE_FLT_FN (BUILT_IN_EXP):
15410 CASE_FLT_FN (BUILT_IN_EXP10):
15411 CASE_FLT_FN (BUILT_IN_EXP2):
15412 CASE_FLT_FN (BUILT_IN_FABS):
15413 CASE_FLT_FN (BUILT_IN_FDIM):
15414 CASE_FLT_FN (BUILT_IN_HYPOT):
15415 CASE_FLT_FN (BUILT_IN_POW10):
15416 CASE_INT_FN (BUILT_IN_FFS):
15417 CASE_INT_FN (BUILT_IN_PARITY):
15418 CASE_INT_FN (BUILT_IN_POPCOUNT):
15419 case BUILT_IN_BSWAP32:
15420 case BUILT_IN_BSWAP64:
15421 /* Always true. */
15422 return true;
15424 CASE_FLT_FN (BUILT_IN_SQRT):
15425 /* sqrt(-0.0) is -0.0. */
15426 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15427 return true;
15428 return tree_expr_nonnegative_warnv_p (arg0,
15429 strict_overflow_p);
15431 CASE_FLT_FN (BUILT_IN_ASINH):
15432 CASE_FLT_FN (BUILT_IN_ATAN):
15433 CASE_FLT_FN (BUILT_IN_ATANH):
15434 CASE_FLT_FN (BUILT_IN_CBRT):
15435 CASE_FLT_FN (BUILT_IN_CEIL):
15436 CASE_FLT_FN (BUILT_IN_ERF):
15437 CASE_FLT_FN (BUILT_IN_EXPM1):
15438 CASE_FLT_FN (BUILT_IN_FLOOR):
15439 CASE_FLT_FN (BUILT_IN_FMOD):
15440 CASE_FLT_FN (BUILT_IN_FREXP):
15441 CASE_FLT_FN (BUILT_IN_ICEIL):
15442 CASE_FLT_FN (BUILT_IN_IFLOOR):
15443 CASE_FLT_FN (BUILT_IN_IRINT):
15444 CASE_FLT_FN (BUILT_IN_IROUND):
15445 CASE_FLT_FN (BUILT_IN_LCEIL):
15446 CASE_FLT_FN (BUILT_IN_LDEXP):
15447 CASE_FLT_FN (BUILT_IN_LFLOOR):
15448 CASE_FLT_FN (BUILT_IN_LLCEIL):
15449 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15450 CASE_FLT_FN (BUILT_IN_LLRINT):
15451 CASE_FLT_FN (BUILT_IN_LLROUND):
15452 CASE_FLT_FN (BUILT_IN_LRINT):
15453 CASE_FLT_FN (BUILT_IN_LROUND):
15454 CASE_FLT_FN (BUILT_IN_MODF):
15455 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15456 CASE_FLT_FN (BUILT_IN_RINT):
15457 CASE_FLT_FN (BUILT_IN_ROUND):
15458 CASE_FLT_FN (BUILT_IN_SCALB):
15459 CASE_FLT_FN (BUILT_IN_SCALBLN):
15460 CASE_FLT_FN (BUILT_IN_SCALBN):
15461 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15462 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15463 CASE_FLT_FN (BUILT_IN_SINH):
15464 CASE_FLT_FN (BUILT_IN_TANH):
15465 CASE_FLT_FN (BUILT_IN_TRUNC):
15466 /* True if the 1st argument is nonnegative. */
15467 return tree_expr_nonnegative_warnv_p (arg0,
15468 strict_overflow_p);
15470 CASE_FLT_FN (BUILT_IN_FMAX):
15471 /* True if the 1st OR 2nd arguments are nonnegative. */
15472 return (tree_expr_nonnegative_warnv_p (arg0,
15473 strict_overflow_p)
15474 || (tree_expr_nonnegative_warnv_p (arg1,
15475 strict_overflow_p)));
15477 CASE_FLT_FN (BUILT_IN_FMIN):
15478 /* True if the 1st AND 2nd arguments are nonnegative. */
15479 return (tree_expr_nonnegative_warnv_p (arg0,
15480 strict_overflow_p)
15481 && (tree_expr_nonnegative_warnv_p (arg1,
15482 strict_overflow_p)));
15484 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15485 /* True if the 2nd argument is nonnegative. */
15486 return tree_expr_nonnegative_warnv_p (arg1,
15487 strict_overflow_p);
15489 CASE_FLT_FN (BUILT_IN_POWI):
15490 /* True if the 1st argument is nonnegative or the second
15491 argument is an even integer. */
15492 if (TREE_CODE (arg1) == INTEGER_CST
15493 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15494 return true;
15495 return tree_expr_nonnegative_warnv_p (arg0,
15496 strict_overflow_p);
15498 CASE_FLT_FN (BUILT_IN_POW):
15499 /* True if the 1st argument is nonnegative or the second
15500 argument is an even integer valued real. */
15501 if (TREE_CODE (arg1) == REAL_CST)
15503 REAL_VALUE_TYPE c;
15504 HOST_WIDE_INT n;
15506 c = TREE_REAL_CST (arg1);
15507 n = real_to_integer (&c);
15508 if ((n & 1) == 0)
15510 REAL_VALUE_TYPE cint;
15511 real_from_integer (&cint, VOIDmode, n,
15512 n < 0 ? -1 : 0, 0);
15513 if (real_identical (&c, &cint))
15514 return true;
15517 return tree_expr_nonnegative_warnv_p (arg0,
15518 strict_overflow_p);
15520 default:
15521 break;
15523 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15524 type);
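/* Example for the BUILT_IN_POW case above, illustration only:
   pow (x, 2.0) is treated as non-negative for any x because 2.0 is
   an even integer-valued real, whereas pow (x, 2.5) is non-negative
   only if x itself can be shown non-negative.  */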
15527 /* Return true if T is known to be non-negative. If the return
15528 value is based on the assumption that signed overflow is undefined,
15529 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15530 *STRICT_OVERFLOW_P. */
15532 bool
15533 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15535 enum tree_code code = TREE_CODE (t);
15536 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15537 return true;
15539 switch (code)
15541 case TARGET_EXPR:
15543 tree temp = TARGET_EXPR_SLOT (t);
15544 t = TARGET_EXPR_INITIAL (t);
15546 /* If the initializer is non-void, then it's a normal expression
15547 that will be assigned to the slot. */
15548 if (!VOID_TYPE_P (t))
15549 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15551 /* Otherwise, the initializer sets the slot in some way. One common
15552 way is an assignment statement at the end of the initializer. */
15553 while (1)
15555 if (TREE_CODE (t) == BIND_EXPR)
15556 t = expr_last (BIND_EXPR_BODY (t));
15557 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15558 || TREE_CODE (t) == TRY_CATCH_EXPR)
15559 t = expr_last (TREE_OPERAND (t, 0));
15560 else if (TREE_CODE (t) == STATEMENT_LIST)
15561 t = expr_last (t);
15562 else
15563 break;
15565 if (TREE_CODE (t) == MODIFY_EXPR
15566 && TREE_OPERAND (t, 0) == temp)
15567 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15568 strict_overflow_p);
15570 return false;
15573 case CALL_EXPR:
15575 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15576 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15578 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15579 get_callee_fndecl (t),
15580 arg0,
15581 arg1,
15582 strict_overflow_p);
15584 case COMPOUND_EXPR:
15585 case MODIFY_EXPR:
15586 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15587 strict_overflow_p);
15588 case BIND_EXPR:
15589 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15590 strict_overflow_p);
15591 case SAVE_EXPR:
15592 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15593 strict_overflow_p);
15595 default:
15596 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15597 TREE_TYPE (t));
15600 /* We don't know sign of `t', so be conservative and return false. */
15601 return false;
15604 /* Return true if T is known to be non-negative. If the return
15605 value is based on the assumption that signed overflow is undefined,
15606 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15607 *STRICT_OVERFLOW_P. */
15609 bool
15610 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15612 enum tree_code code;
15613 if (t == error_mark_node)
15614 return false;
15616 code = TREE_CODE (t);
15617 switch (TREE_CODE_CLASS (code))
15619 case tcc_binary:
15620 case tcc_comparison:
15621 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15622 TREE_TYPE (t),
15623 TREE_OPERAND (t, 0),
15624 TREE_OPERAND (t, 1),
15625 strict_overflow_p);
15627 case tcc_unary:
15628 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15629 TREE_TYPE (t),
15630 TREE_OPERAND (t, 0),
15631 strict_overflow_p);
15633 case tcc_constant:
15634 case tcc_declaration:
15635 case tcc_reference:
15636 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15638 default:
15639 break;
15642 switch (code)
15644 case TRUTH_AND_EXPR:
15645 case TRUTH_OR_EXPR:
15646 case TRUTH_XOR_EXPR:
15647 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15648 TREE_TYPE (t),
15649 TREE_OPERAND (t, 0),
15650 TREE_OPERAND (t, 1),
15651 strict_overflow_p);
15652 case TRUTH_NOT_EXPR:
15653 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15654 TREE_TYPE (t),
15655 TREE_OPERAND (t, 0),
15656 strict_overflow_p);
15658 case COND_EXPR:
15659 case CONSTRUCTOR:
15660 case OBJ_TYPE_REF:
15661 case ASSERT_EXPR:
15662 case ADDR_EXPR:
15663 case WITH_SIZE_EXPR:
15664 case SSA_NAME:
15665 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15667 default:
15668 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15672 /* Return true if `t' is known to be non-negative. Handle warnings
15673 about undefined signed overflow. */
15675 bool
15676 tree_expr_nonnegative_p (tree t)
15678 bool ret, strict_overflow_p;
15680 strict_overflow_p = false;
15681 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15682 if (strict_overflow_p)
15683 fold_overflow_warning (("assuming signed overflow does not occur when "
15684 "determining that expression is always "
15685 "non-negative"),
15686 WARN_STRICT_OVERFLOW_MISC);
15687 return ret;
15691 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15692 For floating point we further ensure that T is not denormal.
15693 Similar logic is present in nonzero_address_p in rtlanal.c.
15695 If the return value is based on the assumption that signed overflow
15696 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 change *STRICT_OVERFLOW_P. */
15699 bool
15700 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15701 bool *strict_overflow_p)
15703 switch (code)
15705 case ABS_EXPR:
15706 return tree_expr_nonzero_warnv_p (op0,
15707 strict_overflow_p);
15709 case NOP_EXPR:
15711 tree inner_type = TREE_TYPE (op0);
15712 tree outer_type = type;
15714 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15715 && tree_expr_nonzero_warnv_p (op0,
15716 strict_overflow_p));
15718 break;
15720 case NON_LVALUE_EXPR:
15721 return tree_expr_nonzero_warnv_p (op0,
15722 strict_overflow_p);
15724 default:
15725 break;
15728 return false;
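
/* The precision check in the NOP_EXPR case above keeps this correct
   for narrowing casts: a widening cast such as (long) i preserves a
   nonzero int i, while a narrowing cast like (char) 0x100 would
   truncate a nonzero value to zero.  */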
/* Return true when the binary expression (CODE OP0 OP1) is known to
   be nonzero.  Only integral and pointer types are handled; similar
   logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With negative values present it is hard to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a two's-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;
    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;
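
    /* The MULT_EXPR case above leans on undefined overflow: for signed
       nonzero X and Y, X * Y could only become zero by wrapping, so the
       product is folded as nonzero and *STRICT_OVERFLOW_P records the
       assumption.  */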
    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          /* The minimum of two nonzero operands is one of them, hence
             itself nonzero.  */
          return true;
        }
      break;
    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;
    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T (an address, integer constant or conditional
   expression) is known to be nonzero.  Similar logic is present in
   nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also
           be NULL, so protect with -fdelete-null-pointer-checks; but
           this is not needed for variables allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
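
/* Note the ADDR_EXPR handling above: the address of a weak symbol may
   resolve to NULL, so e.g. &weak_fn is not folded as nonzero, whereas
   the address of a local automatic variable always is, even without
   -fdelete-null-pointer-checks.  */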
/* Return true when the expression T is known to be nonzero.  Only
   integral and pointer types are handled; doing something useful for
   floating point would also require showing that the value is not
   denormal.  Similar logic is present in nonzero_address in
   rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is known to be nonzero.  Handle warnings about
   undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
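
/* This predicate backs folds such as simplifying PTR != 0 to true
   when PTR is, e.g., the address of a non-weak declaration.  */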
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
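
/* A typical use, sketched: fold_binary_to_constant (PLUS_EXPR,
   integer_type_node, build_int_cst (integer_type_node, 2),
   build_int_cst (integer_type_node, 3)) yields the INTEGER_CST 5,
   while the same call with a VAR_DECL operand yields NULL_TREE.  */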
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed; otherwise return NULL_TREE.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special case of a zero lower bound.

             We convert the low_bound to sizetype to avoid problems
             with constant folding.  (E.g. suppose the lower bound is 1
             and its mode is QI.  Without the conversion,
             (ARRAY + (INDEX - (unsigned char) 1)) becomes
             ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
             (ARRAY + 255 + INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
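
/* For example, a read such as "hello"[1] passes the checks above
   (an in-bounds INTEGER_CST index into a STRING_CST with single-byte
   integer elements) and folds to the character constant 'e'.  */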
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
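
/* Note that negating the most negative value of a signed type, e.g.
   INT_MIN, overflows; force_fit_type_double then returns a constant
   with TREE_OVERFLOW set rather than a plain INTEGER_CST.  */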
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
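
/* E.g. folding ~0 in a 32-bit unsigned type produces the INTEGER_CST
   0xffffffff, the excess high bits of the double_int being masked off
   by force_fit_type_double.  */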
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }
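
  /* Ordered comparisons on NaN operands fold to false only when
     -ftrapping-math is off: e.g. NaN < 1.0 is false, but folding it
     away would also discard the "invalid" exception the comparison
     must raise, hence the NULL_TREE bail-out above.  */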
  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }
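
  /* The vector result is an element-wise mask, e.g. comparing
     {1, 2} == {1, 3} yields {-1, 0}: all-ones for true lanes and zero
     for false lanes, as built just above.  */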
  /* From here on we handle only LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
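
/* A worked example of the swap/invert scheme above: 2 >= 3 is handled
   as the inverse of 2 < 3; INT_CST_LT yields 1, and the final
   inversion produces 0, the expected false result.  */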
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the
     return; if that is a MODIFY_EXPR, check its right-hand side.  If
     the relevant operand has no side effects, we don't need to wrap
     the expression in a cleanup point expression.  Note that we don't
     check the left-hand side of the modify, because it should always
     be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
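
/* E.g. "return x;" is a RETURN_EXPR whose MODIFY_EXPR right-hand side
   x has no side effects, so it is returned unwrapped, while
   "return f ();" keeps the CLEANUP_POINT_EXPR.  */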
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }
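
  /* E.g. for _Complex double z, the access ((double *) &z)[1] matches
     the COMPLEX_TYPE arm above because the byte offset equals
     TYPE_SIZE_UNIT (double), and so folds to __imag__ z.  */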
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
/* Build an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
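
/* For instance, if the value of x + f () is ignored, the loop above
   drops the side-effect-free operand x and keeps just f (); the type
   of the result may differ from that of the original expression,
   which is fine for an ignored value.  */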
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
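
/* Worked example: rounding 13 up to a multiple of 8 takes the
   power-of-two path; the non-constant case computes
   (13 + 7) & -8 == 16, and the INTEGER_CST case computes
   (13 & ~7) + 8 == 16 directly.  */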
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
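
/* Likewise, rounding 13 down to a multiple of 8 is just
   13 & -8 == 8 on the power-of-two path.  */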
/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it in
   *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
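
/* E.g. for char a[10], ptr_difference_const on &a[3] and &a[1] finds
   the common core &a with bit positions 24 and 8, and stores
   (24 - 8) / BITS_PER_UNIT == 2 in *DIFF.  */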
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
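
    /* The ABS_EXPR/NEGATE_EXPR case above is the base case: when only
       the magnitude of the result matters, both -x and |x| reduce to x,
       so e.g. a caller folding fabs (-x * y) can strip the inner
       negation and work with fabs (x * y).  */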
    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call; return its first
               argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;