/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
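/* Worked example of the encoding (added for illustration): the four bits
   are UNORD (8), GT (4), EQ (2) and LT (1), and each code is the OR of
   the outcomes for which the comparison holds.  Hence
     COMPCODE_LE (3)  == COMPCODE_LT | COMPCODE_EQ              (1 | 2)
     COMPCODE_NE (13) == COMPCODE_UNORD | COMPCODE_GT | COMPCODE_LT
							     (8 | 4 | 1)
   so the AND or OR of two comparisons of the same operands is just the
   bitwise AND or OR of their codes.  */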
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare x first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
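/* For example (illustration only): dividing the integer constants 12 by 4
   under EXACT_DIV_EXPR leaves remainder 0 and yields the constant 3, while
   13 by 4 leaves remainder 1, so NULL_TREE is returned and no folding
   happens.  */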
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
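/* Illustrative note (an editorial assumption, not in the upstream comment):
   the rint family is odd under any symmetric rounding mode, but with
   -frounding-math the mode may be directed at run time; under FE_UPWARD,
   rint (0.5) == 1.0 while rint (-0.5) == -0.0, so -f(x) == f(-x) fails,
   which is presumably why these cases are guarded by !flag_rounding_math.  */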
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
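/* Worked example (for illustration): in a 32-bit signed type the only
   constant this rejects is INT_MIN, whose bit pattern is exactly
   1 << (prec - 1); its negation, 2147483648, is not representable.
   Every other value, e.g. -2147483647, negates safely.  */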
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
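/* Worked example for the RSHIFT_EXPR case above (illustration only):
   with 32-bit int, (int)x >> 31 evaluates to 0 or -1 depending on the
   sign bit, so its negation is 0 or 1 -- exactly the value of
   (unsigned)x >> 31.  Rewriting this way removes the negation without
   changing any result.  */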
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
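/* Usage sketch (illustrative): negate_expr applied to the expression
   a - b goes through the MINUS_EXPR case of fold_negate_expr and yields
   b - a (when signed zeros and sign-dependent rounding are not honored);
   an expression with no simpler negation is just wrapped in a
   NEGATE_EXPR.  */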
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, in which case we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
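/* Worked example (illustrative): splitting IN = x - 5 with
   CODE == PLUS_EXPR and NEGATE_P == 0 takes the MINUS_EXPR branch;
   the literal 5 was subtracted, so *LITP stays null, *MINUS_LITP is
   set to 5, *CONP stays null, and x is returned as the variable part.
   Recombining gives x + (-5), i.e. the original tree.  */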
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
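/* Worked example for the division shortcut above (illustration): for
   CEIL_DIV_EXPR with op1 == 7 and op2 == 3, the adjustment makes
   op1.low == 9 and 9 / 3 == 3, which is indeed ceil(7/3).  The shortcut
   only fires when both operands fit in the low word and are
   non-negative.  */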
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!host_integerp (arg2, 1))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
	  unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
	  unsigned HOST_WIDE_INT innerc
	    = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  if (code == VEC_LSHIFT_EXPR)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
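/* Worked example for the whole-vector shifts above (illustration): for a
   vector of four 32-bit elements, innerc == 32 and outerc == 128, so a
   VEC_RSHIFT_EXPR by the scalar constant 64 gives offset == 2: each
   result element takes the value two lanes over, and the lanes shifted
   past the end are filled with zero.  A shift amount that is not a
   multiple of the element size returns NULL_TREE.  */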
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
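/* Worked example (illustration): for sizetype constants arg0 == 4 and
   arg1 == 12, the operands compare unequal and arg1 > arg0, so the
   function computes 12 - 4 == 8 in the unsigned type, converts it to
   ssizetype (which certainly can represent 8), and subtracts it from
   zero, returning the ssizetype constant -8.  */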
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
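/* Worked example (illustration): truncating the REAL_CST 3.9 to a 32-bit
   int yields 3; converting NaN yields 0 with TREE_OVERFLOW set; and
   converting 1e30 saturates to INT_MAX, again with TREE_OVERFLOW set,
   following the Java-style rules described above.  */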
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2049 /* Return false if expr can be assumed not to be an lvalue, true
2050 otherwise. */
2052 static bool
2053 maybe_lvalue_p (const_tree x)
2055 /* We only need to wrap lvalue tree codes. */
2056 switch (TREE_CODE (x))
2058 case VAR_DECL:
2059 case PARM_DECL:
2060 case RESULT_DECL:
2061 case LABEL_DECL:
2062 case FUNCTION_DECL:
2063 case SSA_NAME:
2065 case COMPONENT_REF:
2066 case MEM_REF:
2067 case INDIRECT_REF:
2068 case ARRAY_REF:
2069 case ARRAY_RANGE_REF:
2070 case BIT_FIELD_REF:
2071 case OBJ_TYPE_REF:
2073 case REALPART_EXPR:
2074 case IMAGPART_EXPR:
2075 case PREINCREMENT_EXPR:
2076 case PREDECREMENT_EXPR:
2077 case SAVE_EXPR:
2078 case TRY_CATCH_EXPR:
2079 case WITH_CLEANUP_EXPR:
2080 case COMPOUND_EXPR:
2081 case MODIFY_EXPR:
2082 case TARGET_EXPR:
2083 case COND_EXPR:
2084 case BIND_EXPR:
2085 break;
2087 default:
2088 /* Assume the worst for front-end tree codes. */
2089 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2090 break;
2091 return false;
2094 return true;
2097 /* Return an expr equal to X but certainly not valid as an lvalue. */
2099 tree
2100 non_lvalue_loc (location_t loc, tree x)
2102 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2103 us. */
2104 if (in_gimple_form)
2105 return x;
2107 if (! maybe_lvalue_p (x))
2108 return x;
2109 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2112 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2113 Zero means allow extended lvalues. */
2115 int pedantic_lvalues;
2117 /* When pedantic, return an expr equal to X but certainly not valid as a
2118 pedantic lvalue. Otherwise, return X. */
2120 static tree
2121 pedantic_non_lvalue_loc (location_t loc, tree x)
2123 if (pedantic_lvalues)
2124 return non_lvalue_loc (loc, x);
2126 return protected_set_expr_location_unshare (x, loc);
2129 /* Given a tree comparison code, return the code that is the logical inverse.
2130 It is generally not safe to do this for floating-point comparisons, except
2131 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2132 ERROR_MARK in this case. */
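/* For example, when NaNs are honored the inverse of x < y is
   x UNGE y (unordered or greater-equal), since the inverted test must
   also hold when either operand is a NaN; when NaNs cannot occur the
   inverse is simply x >= y.  */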
2134 enum tree_code
2135 invert_tree_comparison (enum tree_code code, bool honor_nans)
2137 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2138 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2139 return ERROR_MARK;
2141 switch (code)
2143 case EQ_EXPR:
2144 return NE_EXPR;
2145 case NE_EXPR:
2146 return EQ_EXPR;
2147 case GT_EXPR:
2148 return honor_nans ? UNLE_EXPR : LE_EXPR;
2149 case GE_EXPR:
2150 return honor_nans ? UNLT_EXPR : LT_EXPR;
2151 case LT_EXPR:
2152 return honor_nans ? UNGE_EXPR : GE_EXPR;
2153 case LE_EXPR:
2154 return honor_nans ? UNGT_EXPR : GT_EXPR;
2155 case LTGT_EXPR:
2156 return UNEQ_EXPR;
2157 case UNEQ_EXPR:
2158 return LTGT_EXPR;
2159 case UNGT_EXPR:
2160 return LE_EXPR;
2161 case UNGE_EXPR:
2162 return LT_EXPR;
2163 case UNLT_EXPR:
2164 return GE_EXPR;
2165 case UNLE_EXPR:
2166 return GT_EXPR;
2167 case ORDERED_EXPR:
2168 return UNORDERED_EXPR;
2169 case UNORDERED_EXPR:
2170 return ORDERED_EXPR;
2171 default:
2172 gcc_unreachable ();
2176 /* Similar, but return the comparison that results if the operands are
2177 swapped. This is safe for floating-point. */
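/* For example, x > y tests the same condition as y < x, so GT_EXPR
   maps to LT_EXPR; the symmetric codes EQ_EXPR, NE_EXPR, ORDERED_EXPR,
   UNORDERED_EXPR, LTGT_EXPR and UNEQ_EXPR map to themselves.  */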
2179 enum tree_code
2180 swap_tree_comparison (enum tree_code code)
2182 switch (code)
2184 case EQ_EXPR:
2185 case NE_EXPR:
2186 case ORDERED_EXPR:
2187 case UNORDERED_EXPR:
2188 case LTGT_EXPR:
2189 case UNEQ_EXPR:
2190 return code;
2191 case GT_EXPR:
2192 return LT_EXPR;
2193 case GE_EXPR:
2194 return LE_EXPR;
2195 case LT_EXPR:
2196 return GT_EXPR;
2197 case LE_EXPR:
2198 return GE_EXPR;
2199 case UNGT_EXPR:
2200 return UNLT_EXPR;
2201 case UNGE_EXPR:
2202 return UNLE_EXPR;
2203 case UNLT_EXPR:
2204 return UNGT_EXPR;
2205 case UNLE_EXPR:
2206 return UNGE_EXPR;
2207 default:
2208 gcc_unreachable ();
2213 /* Convert a comparison tree code from an enum tree_code representation
2214 into a compcode bit-based encoding. This function is the inverse of
2215 compcode_to_comparison. */
2217 static enum comparison_code
2218 comparison_to_compcode (enum tree_code code)
2220 switch (code)
2222 case LT_EXPR:
2223 return COMPCODE_LT;
2224 case EQ_EXPR:
2225 return COMPCODE_EQ;
2226 case LE_EXPR:
2227 return COMPCODE_LE;
2228 case GT_EXPR:
2229 return COMPCODE_GT;
2230 case NE_EXPR:
2231 return COMPCODE_NE;
2232 case GE_EXPR:
2233 return COMPCODE_GE;
2234 case ORDERED_EXPR:
2235 return COMPCODE_ORD;
2236 case UNORDERED_EXPR:
2237 return COMPCODE_UNORD;
2238 case UNLT_EXPR:
2239 return COMPCODE_UNLT;
2240 case UNEQ_EXPR:
2241 return COMPCODE_UNEQ;
2242 case UNLE_EXPR:
2243 return COMPCODE_UNLE;
2244 case UNGT_EXPR:
2245 return COMPCODE_UNGT;
2246 case LTGT_EXPR:
2247 return COMPCODE_LTGT;
2248 case UNGE_EXPR:
2249 return COMPCODE_UNGE;
2250 default:
2251 gcc_unreachable ();
2255 /* Convert a compcode bit-based encoding of a comparison operator back
2256 to GCC's enum tree_code representation. This function is the
2257 inverse of comparison_to_compcode. */
2259 static enum tree_code
2260 compcode_to_comparison (enum comparison_code code)
2262 switch (code)
2264 case COMPCODE_LT:
2265 return LT_EXPR;
2266 case COMPCODE_EQ:
2267 return EQ_EXPR;
2268 case COMPCODE_LE:
2269 return LE_EXPR;
2270 case COMPCODE_GT:
2271 return GT_EXPR;
2272 case COMPCODE_NE:
2273 return NE_EXPR;
2274 case COMPCODE_GE:
2275 return GE_EXPR;
2276 case COMPCODE_ORD:
2277 return ORDERED_EXPR;
2278 case COMPCODE_UNORD:
2279 return UNORDERED_EXPR;
2280 case COMPCODE_UNLT:
2281 return UNLT_EXPR;
2282 case COMPCODE_UNEQ:
2283 return UNEQ_EXPR;
2284 case COMPCODE_UNLE:
2285 return UNLE_EXPR;
2286 case COMPCODE_UNGT:
2287 return UNGT_EXPR;
2288 case COMPCODE_LTGT:
2289 return LTGT_EXPR;
2290 case COMPCODE_UNGE:
2291 return UNGE_EXPR;
2292 default:
2293 gcc_unreachable ();
2297 /* Return a tree for the comparison which is the combination of
2298 doing the AND or OR (depending on CODE) of the two operations LCODE
2299 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2300 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2301 if this makes the transformation invalid. */
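/* For example, with the bit encoding of comparisons above,
   (x < y) || (x == y) combines as COMPCODE_LT | COMPCODE_EQ
   == 1 | 2 == 3 == COMPCODE_LE, so the pair folds to the single
   comparison x <= y.  */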
2303 tree
2304 combine_comparisons (location_t loc,
2305 enum tree_code code, enum tree_code lcode,
2306 enum tree_code rcode, tree truth_type,
2307 tree ll_arg, tree lr_arg)
2309 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2310 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2311 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2312 int compcode;
2314 switch (code)
2316 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2317 compcode = lcompcode & rcompcode;
2318 break;
2320 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2321 compcode = lcompcode | rcompcode;
2322 break;
2324 default:
2325 return NULL_TREE;
2328 if (!honor_nans)
2330 /* Eliminate unordered comparisons, as well as LTGT and ORD
2331 which are not used unless the mode has NaNs. */
2332 compcode &= ~COMPCODE_UNORD;
2333 if (compcode == COMPCODE_LTGT)
2334 compcode = COMPCODE_NE;
2335 else if (compcode == COMPCODE_ORD)
2336 compcode = COMPCODE_TRUE;
2338 else if (flag_trapping_math)
2340 /* Check that the original operation and the optimized ones will trap
2341 under the same condition. */
2342 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2343 && (lcompcode != COMPCODE_EQ)
2344 && (lcompcode != COMPCODE_ORD);
2345 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2346 && (rcompcode != COMPCODE_EQ)
2347 && (rcompcode != COMPCODE_ORD);
2348 bool trap = (compcode & COMPCODE_UNORD) == 0
2349 && (compcode != COMPCODE_EQ)
2350 && (compcode != COMPCODE_ORD);
2352 /* In a short-circuited boolean expression the LHS might be
2353 such that the RHS, if evaluated, will never trap. For
2354 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2355 if neither x nor y is NaN. (This is a mixed blessing: for
2356 example, the expression above will never trap, hence
2357 optimizing it to x < y would be invalid). */
2358 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2359 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2360 rtrap = false;
2362 /* If the comparison was short-circuited, and only the RHS
2363 trapped, we may now generate a spurious trap. */
2364 if (rtrap && !ltrap
2365 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2366 return NULL_TREE;
2368 /* If we changed the conditions that cause a trap, we lose. */
2369 if ((ltrap || rtrap) != trap)
2370 return NULL_TREE;
2373 if (compcode == COMPCODE_TRUE)
2374 return constant_boolean_node (true, truth_type);
2375 else if (compcode == COMPCODE_FALSE)
2376 return constant_boolean_node (false, truth_type);
2377 else
2379 enum tree_code tcode;
2381 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2382 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2386 /* Return nonzero if two operands (typically of the same tree node)
2387 are necessarily equal. If either argument has side-effects this
2388 function returns zero. FLAGS modifies behavior as follows:
2390 If OEP_ONLY_CONST is set, only return nonzero for constants.
2391 This function tests whether the operands are indistinguishable;
2392 it does not test whether they are equal using C's == operation.
2393 The distinction is important for IEEE floating point, because
2394 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2395 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2397 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2398 even though it may hold multiple values during a function.
2399 This is because a GCC tree node guarantees that nothing else is
2400 executed between the evaluation of its "operands" (which may often
2401 be evaluated in arbitrary order). Hence if the operands themselves
2402 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2403 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2404 unset means assuming isochronic (or instantaneous) tree equivalence.
2405 Unless comparing arbitrary expression trees, such as from different
2406 statements, this flag can usually be left unset.
2408 If OEP_PURE_SAME is set, then pure functions with identical arguments
2409 are considered the same. It is used when the caller has other ways
2410 to ensure that global memory is unchanged in between. */
2412 int
2413 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2415 /* If either is ERROR_MARK, they aren't equal. */
2416 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2417 || TREE_TYPE (arg0) == error_mark_node
2418 || TREE_TYPE (arg1) == error_mark_node)
2419 return 0;
2421 /* Similar, if either does not have a type (like a released SSA name),
2422 they aren't equal. */
2423 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2424 return 0;
2426 /* Check equality of integer constants before bailing out due to
2427 precision differences. */
2428 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2429 return tree_int_cst_equal (arg0, arg1);
2431 /* If both types don't have the same signedness, then we can't consider
2432 them equal. We must check this before the STRIP_NOPS calls
2433 because they may change the signedness of the arguments. As pointers
2434 strictly don't have a signedness, require either two pointers or
2435 two non-pointers as well. */
2436 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2437 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2438 return 0;
2440 /* We cannot consider pointers to different address space equal. */
2441 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2442 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2443 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2444 return 0;
2446 /* If both types don't have the same precision, then it is not safe
2447 to strip NOPs. */
2448 if (element_precision (TREE_TYPE (arg0))
2449 != element_precision (TREE_TYPE (arg1)))
2450 return 0;
2452 STRIP_NOPS (arg0);
2453 STRIP_NOPS (arg1);
2455 /* In case both args are comparisons but with different comparison
2456 code, try to swap the comparison operands of one arg to produce
2457 a match and compare that variant. */
2458 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2459 && COMPARISON_CLASS_P (arg0)
2460 && COMPARISON_CLASS_P (arg1))
2462 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2464 if (TREE_CODE (arg0) == swap_code)
2465 return operand_equal_p (TREE_OPERAND (arg0, 0),
2466 TREE_OPERAND (arg1, 1), flags)
2467 && operand_equal_p (TREE_OPERAND (arg0, 1),
2468 TREE_OPERAND (arg1, 0), flags);
2471 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2472 /* This is needed for conversions and for COMPONENT_REF.
2473 Might as well play it safe and always test this. */
2474 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2475 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2476 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2477 return 0;
2479 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2480 We don't care about side effects in that case because the SAVE_EXPR
2481 takes care of that for us. In all other cases, two expressions are
2482 equal if they have no side effects. If we have two identical
2483 expressions with side effects that should be treated the same due
2484 to the only side effects being identical SAVE_EXPR's, that will
2485 be detected in the recursive calls below.
2486 If we are taking an invariant address of two identical objects
2487 they are necessarily equal as well. */
2488 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2489 && (TREE_CODE (arg0) == SAVE_EXPR
2490 || (flags & OEP_CONSTANT_ADDRESS_OF)
2491 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2492 return 1;
2494 /* Next handle constant cases, those for which we can return 1 even
2495 if ONLY_CONST is set. */
2496 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2497 switch (TREE_CODE (arg0))
2499 case INTEGER_CST:
2500 return tree_int_cst_equal (arg0, arg1);
2502 case FIXED_CST:
2503 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2504 TREE_FIXED_CST (arg1));
2506 case REAL_CST:
2507 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2508 TREE_REAL_CST (arg1)))
2509 return 1;
2512 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2514 /* If we do not distinguish between signed and unsigned zero,
2515 consider them equal. */
2516 if (real_zerop (arg0) && real_zerop (arg1))
2517 return 1;
2519 return 0;
2521 case VECTOR_CST:
2523 unsigned i;
2525 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2526 return 0;
2528 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2530 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2531 VECTOR_CST_ELT (arg1, i), flags))
2532 return 0;
2534 return 1;
2537 case COMPLEX_CST:
2538 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2539 flags)
2540 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2541 flags));
2543 case STRING_CST:
2544 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2545 && ! memcmp (TREE_STRING_POINTER (arg0),
2546 TREE_STRING_POINTER (arg1),
2547 TREE_STRING_LENGTH (arg0)));
2549 case ADDR_EXPR:
2550 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2551 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2552 ? OEP_CONSTANT_ADDRESS_OF : 0);
2553 default:
2554 break;
2557 if (flags & OEP_ONLY_CONST)
2558 return 0;
2560 /* Define macros to test an operand from arg0 and arg1 for equality and a
2561 variant that allows null and views null as being different from any
2562 non-null value. In the latter case, if either is null, then both
2563 must be; otherwise, do the normal comparison. */
2564 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2565 TREE_OPERAND (arg1, N), flags)
2567 #define OP_SAME_WITH_NULL(N) \
2568 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2569 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2571 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2573 case tcc_unary:
2574 /* Two conversions are equal only if signedness and modes match. */
2575 switch (TREE_CODE (arg0))
2577 CASE_CONVERT:
2578 case FIX_TRUNC_EXPR:
2579 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2580 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2581 return 0;
2582 break;
2583 default:
2584 break;
2587 return OP_SAME (0);
2590 case tcc_comparison:
2591 case tcc_binary:
2592 if (OP_SAME (0) && OP_SAME (1))
2593 return 1;
2595 /* For commutative ops, allow the other order. */
2596 return (commutative_tree_code (TREE_CODE (arg0))
2597 && operand_equal_p (TREE_OPERAND (arg0, 0),
2598 TREE_OPERAND (arg1, 1), flags)
2599 && operand_equal_p (TREE_OPERAND (arg0, 1),
2600 TREE_OPERAND (arg1, 0), flags));
2602 case tcc_reference:
2603 /* If either of the pointer (or reference) expressions we are
2604 dereferencing contain a side effect, these cannot be equal,
2605 but their addresses can be. */
2606 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2607 && (TREE_SIDE_EFFECTS (arg0)
2608 || TREE_SIDE_EFFECTS (arg1)))
2609 return 0;
2611 switch (TREE_CODE (arg0))
2613 case INDIRECT_REF:
2614 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2615 return OP_SAME (0);
2617 case REALPART_EXPR:
2618 case IMAGPART_EXPR:
2619 return OP_SAME (0);
2621 case TARGET_MEM_REF:
2622 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2623 /* Require equal extra operands and then fall through to MEM_REF
2624 handling of the two common operands. */
2625 if (!OP_SAME_WITH_NULL (2)
2626 || !OP_SAME_WITH_NULL (3)
2627 || !OP_SAME_WITH_NULL (4))
2628 return 0;
2629 /* Fallthru. */
2630 case MEM_REF:
2631 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2632 /* Require equal access sizes, and similar pointer types.
2633 We can have incomplete types for array references of
2634 variable-sized arrays from the Fortran frontend
2635 though. Also verify the types are compatible. */
2636 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2637 || (TYPE_SIZE (TREE_TYPE (arg0))
2638 && TYPE_SIZE (TREE_TYPE (arg1))
2639 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2640 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2641 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2642 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2643 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2644 && OP_SAME (0) && OP_SAME (1));
2646 case ARRAY_REF:
2647 case ARRAY_RANGE_REF:
2648 /* Operands 2 and 3 may be null.
2649 Compare the array index by value if it is constant first as we
2650 may have different types but same value here. */
2651 if (!OP_SAME (0))
2652 return 0;
2653 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2654 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2655 TREE_OPERAND (arg1, 1))
2656 || OP_SAME (1))
2657 && OP_SAME_WITH_NULL (2)
2658 && OP_SAME_WITH_NULL (3));
2660 case COMPONENT_REF:
2661 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2662 may be NULL when we're called to compare MEM_EXPRs. */
2663 if (!OP_SAME_WITH_NULL (0))
2664 return 0;
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2668 case BIT_FIELD_REF:
2669 if (!OP_SAME (0))
2670 return 0;
2671 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2672 return OP_SAME (1) && OP_SAME (2);
2674 default:
2675 return 0;
2678 case tcc_expression:
2679 switch (TREE_CODE (arg0))
2681 case ADDR_EXPR:
2682 case TRUTH_NOT_EXPR:
2683 return OP_SAME (0);
2685 case TRUTH_ANDIF_EXPR:
2686 case TRUTH_ORIF_EXPR:
2687 return OP_SAME (0) && OP_SAME (1);
2689 case FMA_EXPR:
2690 case WIDEN_MULT_PLUS_EXPR:
2691 case WIDEN_MULT_MINUS_EXPR:
2692 if (!OP_SAME (2))
2693 return 0;
2694 /* The multiplication operands are commutative. */
2695 /* FALLTHRU */
2697 case TRUTH_AND_EXPR:
2698 case TRUTH_OR_EXPR:
2699 case TRUTH_XOR_EXPR:
2700 if (OP_SAME (0) && OP_SAME (1))
2701 return 1;
2703 /* Otherwise take into account this is a commutative operation. */
2704 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2705 TREE_OPERAND (arg1, 1), flags)
2706 && operand_equal_p (TREE_OPERAND (arg0, 1),
2707 TREE_OPERAND (arg1, 0), flags));
2709 case COND_EXPR:
2710 case VEC_COND_EXPR:
2711 case DOT_PROD_EXPR:
2712 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2714 default:
2715 return 0;
2718 case tcc_vl_exp:
2719 switch (TREE_CODE (arg0))
2721 case CALL_EXPR:
2722 /* If the CALL_EXPRs call different functions, then they
2723 clearly cannot be equal. */
2724 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2725 flags))
2726 return 0;
2729 unsigned int cef = call_expr_flags (arg0);
2730 if (flags & OEP_PURE_SAME)
2731 cef &= ECF_CONST | ECF_PURE;
2732 else
2733 cef &= ECF_CONST;
2734 if (!cef)
2735 return 0;
2738 /* Now see if all the arguments are the same. */
2740 const_call_expr_arg_iterator iter0, iter1;
2741 const_tree a0, a1;
2742 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2743 a1 = first_const_call_expr_arg (arg1, &iter1);
2744 a0 && a1;
2745 a0 = next_const_call_expr_arg (&iter0),
2746 a1 = next_const_call_expr_arg (&iter1))
2747 if (! operand_equal_p (a0, a1, flags))
2748 return 0;
2750 /* If we get here and both argument lists are exhausted
2751 then the CALL_EXPRs are equal. */
2752 return ! (a0 || a1);
2754 default:
2755 return 0;
2758 case tcc_declaration:
2759 /* Consider __builtin_sqrt equal to sqrt. */
2760 return (TREE_CODE (arg0) == FUNCTION_DECL
2761 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2762 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2763 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2765 default:
2766 return 0;
2769 #undef OP_SAME
2770 #undef OP_SAME_WITH_NULL
2773 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2774 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2776 When in doubt, return 0. */
2778 static int
2779 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2781 int unsignedp1, unsignedpo;
2782 tree primarg0, primarg1, primother;
2783 unsigned int correct_width;
2785 if (operand_equal_p (arg0, arg1, 0))
2786 return 1;
2788 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2789 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2792 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2793 and see if the inner values are the same. This removes any
2794 signedness comparison, which doesn't matter here. */
2795 primarg0 = arg0, primarg1 = arg1;
2796 STRIP_NOPS (primarg0);
2797 STRIP_NOPS (primarg1);
2798 if (operand_equal_p (primarg0, primarg1, 0))
2799 return 1;
2801 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2802 actual comparison operand, ARG0.
2804 First throw away any conversions to wider types
2805 already present in the operands. */
2807 primarg1 = get_narrower (arg1, &unsignedp1);
2808 primother = get_narrower (other, &unsignedpo);
2810 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2811 if (unsignedp1 == unsignedpo
2812 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2813 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2815 tree type = TREE_TYPE (arg0);
2817 /* Make sure shorter operand is extended the right way
2818 to match the longer operand. */
2819 primarg1 = fold_convert (signed_or_unsigned_type_for
2820 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2822 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2823 return 1;
2826 return 0;
2829 /* See if ARG is an expression that is either a comparison or is performing
2830 arithmetic on comparisons. The comparisons must only be comparing
2831 two different values, which will be stored in *CVAL1 and *CVAL2; if
2832 they are nonzero it means that some operands have already been found.
2833 No variables may be used anywhere else in the expression except in the
2834 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2835 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2837 If this is true, return 1. Otherwise, return zero. */
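/* For example, for ARG == (x < y) || (y > x), *CVAL1 and *CVAL2 are
   set to x and y and 1 is returned; an expression that compares a
   third value, or that uses x or y outside a comparison, yields 0.  */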
2839 static int
2840 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2842 enum tree_code code = TREE_CODE (arg);
2843 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2845 /* We can handle some of the tcc_expression cases here. */
2846 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2847 tclass = tcc_unary;
2848 else if (tclass == tcc_expression
2849 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2850 || code == COMPOUND_EXPR))
2851 tclass = tcc_binary;
2853 else if (tclass == tcc_expression && code == SAVE_EXPR
2854 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2856 /* If we've already found a CVAL1 or CVAL2, this expression is
2857 too complex to handle. */
2858 if (*cval1 || *cval2)
2859 return 0;
2861 tclass = tcc_unary;
2862 *save_p = 1;
2865 switch (tclass)
2867 case tcc_unary:
2868 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2870 case tcc_binary:
2871 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2872 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2873 cval1, cval2, save_p));
2875 case tcc_constant:
2876 return 1;
2878 case tcc_expression:
2879 if (code == COND_EXPR)
2880 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2881 cval1, cval2, save_p)
2882 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2883 cval1, cval2, save_p)
2884 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2885 cval1, cval2, save_p));
2886 return 0;
2888 case tcc_comparison:
2889 /* First see if we can handle the first operand, then the second. For
2890 the second operand, we know *CVAL1 can't be zero. It must be that
2891 one side of the comparison is each of the values; test for the
2892 case where this isn't true by failing if the two operands
2893 are the same. */
2895 if (operand_equal_p (TREE_OPERAND (arg, 0),
2896 TREE_OPERAND (arg, 1), 0))
2897 return 0;
2899 if (*cval1 == 0)
2900 *cval1 = TREE_OPERAND (arg, 0);
2901 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2903 else if (*cval2 == 0)
2904 *cval2 = TREE_OPERAND (arg, 0);
2905 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2907 else
2908 return 0;
2910 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2912 else if (*cval2 == 0)
2913 *cval2 = TREE_OPERAND (arg, 1);
2914 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2916 else
2917 return 0;
2919 return 1;
2921 default:
2922 return 0;
2926 /* ARG is a tree that is known to contain just arithmetic operations and
2927 comparisons. Evaluate the operations in the tree substituting NEW0 for
2928 any occurrence of OLD0 as an operand of a comparison and likewise for
2929 NEW1 and OLD1. */
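/* For example, with OLD0 == a, NEW0 == x, OLD1 == b and NEW1 == y,
   the tree (a < b) && (b >= a) is rewritten to (x < y) && (y >= x).  */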
2931 static tree
2932 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2933 tree old1, tree new1)
2935 tree type = TREE_TYPE (arg);
2936 enum tree_code code = TREE_CODE (arg);
2937 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2939 /* We can handle some of the tcc_expression cases here. */
2940 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2941 tclass = tcc_unary;
2942 else if (tclass == tcc_expression
2943 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2944 tclass = tcc_binary;
2946 switch (tclass)
2948 case tcc_unary:
2949 return fold_build1_loc (loc, code, type,
2950 eval_subst (loc, TREE_OPERAND (arg, 0),
2951 old0, new0, old1, new1));
2953 case tcc_binary:
2954 return fold_build2_loc (loc, code, type,
2955 eval_subst (loc, TREE_OPERAND (arg, 0),
2956 old0, new0, old1, new1),
2957 eval_subst (loc, TREE_OPERAND (arg, 1),
2958 old0, new0, old1, new1));
2960 case tcc_expression:
2961 switch (code)
2963 case SAVE_EXPR:
2964 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2965 old1, new1);
2967 case COMPOUND_EXPR:
2968 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2969 old1, new1);
2971 case COND_EXPR:
2972 return fold_build3_loc (loc, code, type,
2973 eval_subst (loc, TREE_OPERAND (arg, 0),
2974 old0, new0, old1, new1),
2975 eval_subst (loc, TREE_OPERAND (arg, 1),
2976 old0, new0, old1, new1),
2977 eval_subst (loc, TREE_OPERAND (arg, 2),
2978 old0, new0, old1, new1));
2979 default:
2980 break;
2982 /* Fall through - ??? */
2984 case tcc_comparison:
2986 tree arg0 = TREE_OPERAND (arg, 0);
2987 tree arg1 = TREE_OPERAND (arg, 1);
2989 /* We need to check both for exact equality and tree equality. The
2990 former will be true if the operand has a side-effect. In that
2991 case, we know the operand occurred exactly once. */
2993 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2994 arg0 = new0;
2995 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2996 arg0 = new1;
2998 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2999 arg1 = new0;
3000 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3001 arg1 = new1;
3003 return fold_build2_loc (loc, code, type, arg0, arg1);
3006 default:
3007 return arg;
3011 /* Return a tree for the case when the result of an expression is RESULT
3012 converted to TYPE and OMITTED was previously an operand of the expression
3013 but is now not needed (e.g., we folded OMITTED * 0).
3015 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3016 the conversion of RESULT to TYPE. */
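/* For example, when folding f () * 0, RESULT is 0 and the call f ()
   is the OMITTED operand; since the call has side effects, the result
   is COMPOUND_EXPR <f (), 0> so that the call is still evaluated.  */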
3018 tree
3019 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3021 tree t = fold_convert_loc (loc, type, result);
3023 /* If the resulting operand is an empty statement, just return the omitted
3024 statement cast to void. */
3025 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3026 return build1_loc (loc, NOP_EXPR, void_type_node,
3027 fold_ignored_result (omitted));
3029 if (TREE_SIDE_EFFECTS (omitted))
3030 return build2_loc (loc, COMPOUND_EXPR, type,
3031 fold_ignored_result (omitted), t);
3033 return non_lvalue_loc (loc, t);
3036 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3038 static tree
3039 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3040 tree omitted)
3042 tree t = fold_convert_loc (loc, type, result);
3044 /* If the resulting operand is an empty statement, just return the omitted
3045 statement cast to void. */
3046 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3047 return build1_loc (loc, NOP_EXPR, void_type_node,
3048 fold_ignored_result (omitted));
3050 if (TREE_SIDE_EFFECTS (omitted))
3051 return build2_loc (loc, COMPOUND_EXPR, type,
3052 fold_ignored_result (omitted), t);
3054 return pedantic_non_lvalue_loc (loc, t);
3057 /* Return a tree for the case when the result of an expression is RESULT
3058 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3059 of the expression but are now not needed.
3061 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3062 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3063 evaluated before OMITTED2. Otherwise, if neither has side effects,
3064 just do the conversion of RESULT to TYPE. */
3066 tree
3067 omit_two_operands_loc (location_t loc, tree type, tree result,
3068 tree omitted1, tree omitted2)
3070 tree t = fold_convert_loc (loc, type, result);
3072 if (TREE_SIDE_EFFECTS (omitted2))
3073 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3074 if (TREE_SIDE_EFFECTS (omitted1))
3075 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3077 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3081 /* Return a simplified tree node for the truth-negation of ARG. This
3082 never alters ARG itself. We assume that ARG is an operation that
3083 returns a truth value (0 or 1).
3085 FIXME: one would think we would fold the result, but it causes
3086 problems with the dominator optimizer. */
3088 static tree
3089 fold_truth_not_expr (location_t loc, tree arg)
3091 tree type = TREE_TYPE (arg);
3092 enum tree_code code = TREE_CODE (arg);
3093 location_t loc1, loc2;
3095 /* If this is a comparison, we can simply invert it, except for
3096 floating-point non-equality comparisons, in which case we just
3097 enclose a TRUTH_NOT_EXPR around what we have. */
3099 if (TREE_CODE_CLASS (code) == tcc_comparison)
3101 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3102 if (FLOAT_TYPE_P (op_type)
3103 && flag_trapping_math
3104 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3105 && code != NE_EXPR && code != EQ_EXPR)
3106 return NULL_TREE;
3108 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3109 if (code == ERROR_MARK)
3110 return NULL_TREE;
3112 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3113 TREE_OPERAND (arg, 1));
3116 switch (code)
3118 case INTEGER_CST:
3119 return constant_boolean_node (integer_zerop (arg), type);
3121 case TRUTH_AND_EXPR:
3122 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3123 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3124 return build2_loc (loc, TRUTH_OR_EXPR, type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3126 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3128 case TRUTH_OR_EXPR:
3129 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3130 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3131 return build2_loc (loc, TRUTH_AND_EXPR, type,
3132 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3133 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3135 case TRUTH_XOR_EXPR:
3136 /* Here we can invert either operand. We invert the first operand
3137 unless the second operand is a TRUTH_NOT_EXPR in which case our
3138 result is the XOR of the first operand with the inside of the
3139 negation of the second operand. */
3141 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3142 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3143 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3144 else
3145 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3146 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3147 TREE_OPERAND (arg, 1));
3149 case TRUTH_ANDIF_EXPR:
3150 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3151 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3152 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3153 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3154 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3156 case TRUTH_ORIF_EXPR:
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3159 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3160 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3161 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3163 case TRUTH_NOT_EXPR:
3164 return TREE_OPERAND (arg, 0);
3166 case COND_EXPR:
3168 tree arg1 = TREE_OPERAND (arg, 1);
3169 tree arg2 = TREE_OPERAND (arg, 2);
3171 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3174 /* A COND_EXPR may have a throw as one operand, which
3175 then has void type. Just leave void operands
3176 as they are. */
3177 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3178 VOID_TYPE_P (TREE_TYPE (arg1))
3179 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3180 VOID_TYPE_P (TREE_TYPE (arg2))
3181 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3184 case COMPOUND_EXPR:
3185 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3186 return build2_loc (loc, COMPOUND_EXPR, type,
3187 TREE_OPERAND (arg, 0),
3188 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3190 case NON_LVALUE_EXPR:
3191 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3192 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3194 CASE_CONVERT:
3195 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3196 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3198 /* ... fall through ... */
3200 case FLOAT_EXPR:
3201 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3202 return build1_loc (loc, TREE_CODE (arg), type,
3203 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3205 case BIT_AND_EXPR:
3206 if (!integer_onep (TREE_OPERAND (arg, 1)))
3207 return NULL_TREE;
3208 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3210 case SAVE_EXPR:
3211 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3213 case CLEANUP_POINT_EXPR:
3214 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3215 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3216 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3218 default:
3219 return NULL_TREE;
3223 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3224 assume that ARG is an operation that returns a truth value (0 or 1
3225 for scalars, 0 or -1 for vectors). Return the folded expression if
3226 folding is successful. Otherwise, return NULL_TREE. */
3228 static tree
3229 fold_invert_truthvalue (location_t loc, tree arg)
3231 tree type = TREE_TYPE (arg);
3232 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3233 ? BIT_NOT_EXPR
3234 : TRUTH_NOT_EXPR,
3235 type, arg);
3238 /* Return a simplified tree node for the truth-negation of ARG. This
3239 never alters ARG itself. We assume that ARG is an operation that
3240 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3242 tree
3243 invert_truthvalue_loc (location_t loc, tree arg)
3245 if (TREE_CODE (arg) == ERROR_MARK)
3246 return arg;
3248 tree type = TREE_TYPE (arg);
3249 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3250 ? BIT_NOT_EXPR
3251 : TRUTH_NOT_EXPR,
3252 type, arg);
3255 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3256 operands are another bit-wise operation with a common input. If so,
3257 distribute the bit operations to save an operation and possibly two if
3258 constants are involved. For example, convert
3259 (A | B) & (A | C) into A | (B & C)
3260 Further simplification will occur if B and C are constants.
3262 If this optimization cannot be done, 0 will be returned. */
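/* The dual transformation is also performed: (A & B) | (A & C) becomes
   A & (B | C), again saving one bit operation.  */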
3264 static tree
3265 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3266 tree arg0, tree arg1)
3268 tree common;
3269 tree left, right;
3271 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3272 || TREE_CODE (arg0) == code
3273 || (TREE_CODE (arg0) != BIT_AND_EXPR
3274 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3275 return 0;
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3279 common = TREE_OPERAND (arg0, 0);
3280 left = TREE_OPERAND (arg0, 1);
3281 right = TREE_OPERAND (arg1, 1);
3283 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3285 common = TREE_OPERAND (arg0, 0);
3286 left = TREE_OPERAND (arg0, 1);
3287 right = TREE_OPERAND (arg1, 0);
3289 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3291 common = TREE_OPERAND (arg0, 1);
3292 left = TREE_OPERAND (arg0, 0);
3293 right = TREE_OPERAND (arg1, 1);
3295 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3297 common = TREE_OPERAND (arg0, 1);
3298 left = TREE_OPERAND (arg0, 0);
3299 right = TREE_OPERAND (arg1, 0);
3301 else
3302 return 0;
3304 common = fold_convert_loc (loc, type, common);
3305 left = fold_convert_loc (loc, type, left);
3306 right = fold_convert_loc (loc, type, right);
3307 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3308 fold_build2_loc (loc, code, type, left, right));
3311 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3312 with code CODE. This optimization is unsafe. */
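/* It is unsafe because reassociating floating-point divisions can
   change rounding, overflow and underflow behavior; callers therefore
   only apply it under -funsafe-math-optimizations.  */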
3313 static tree
3314 distribute_real_division (location_t loc, enum tree_code code, tree type,
3315 tree arg0, tree arg1)
3317 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3318 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3320 /* (A / C) +- (B / C) -> (A +- B) / C. */
3321 if (mul0 == mul1
3322 && operand_equal_p (TREE_OPERAND (arg0, 1),
3323 TREE_OPERAND (arg1, 1), 0))
3324 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3325 fold_build2_loc (loc, code, type,
3326 TREE_OPERAND (arg0, 0),
3327 TREE_OPERAND (arg1, 0)),
3328 TREE_OPERAND (arg0, 1));
3330 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3331 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3332 TREE_OPERAND (arg1, 0), 0)
3333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3334 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3336 REAL_VALUE_TYPE r0, r1;
3337 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3338 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3339 if (!mul0)
3340 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3341 if (!mul1)
3342 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3343 real_arithmetic (&r0, code, &r0, &r1);
3344 return fold_build2_loc (loc, MULT_EXPR, type,
3345 TREE_OPERAND (arg0, 0),
3346 build_real (type, r0));
3349 return NULL_TREE;
3352 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3353 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3355 static tree
3356 make_bit_field_ref (location_t loc, tree inner, tree type,
3357 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3359 tree result, bftype;
3361 if (bitpos == 0)
3363 tree size = TYPE_SIZE (TREE_TYPE (inner));
3364 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3365 || POINTER_TYPE_P (TREE_TYPE (inner)))
3366 && host_integerp (size, 0)
3367 && tree_low_cst (size, 0) == bitsize)
3368 return fold_convert_loc (loc, type, inner);
3371 bftype = type;
3372 if (TYPE_PRECISION (bftype) != bitsize
3373 || TYPE_UNSIGNED (bftype) == !unsignedp)
3374 bftype = build_nonstandard_integer_type (bitsize, 0);
3376 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3377 size_int (bitsize), bitsize_int (bitpos));
3379 if (bftype != type)
3380 result = fold_convert_loc (loc, type, result);
3382 return result;
3385 /* Optimize a bit-field compare.
3387 There are two cases: First is a compare against a constant and the
3388 second is a comparison of two items where the fields are at the same
3389 bit position relative to the start of a chunk (byte, halfword, word)
3390 large enough to contain it. In these cases we can avoid the shift
3391 implicit in bitfield extractions.
3393 For constants, we emit a compare of the shifted constant with the
3394 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3395 compared. For two fields at the same position, we do the ANDs with the
3396 similar mask and compare the result of the ANDs.
3398 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3399 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3400 are the left and right operands of the comparison, respectively.
3402 If the optimization described above can be done, we return the resulting
3403 tree. Otherwise we return zero. */
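/* For example, given

     struct S { unsigned int f : 3; } s;

   the test s.f == 5 can be rewritten, when the field fits in one byte
   on a little-endian target, roughly as

     (*(unsigned char *) &s & 0x07) == 0x05

   avoiding the shift a plain bit-field extraction would need; the
   exact container, mask and shifted constant depend on the target
   layout.  */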
3405 static tree
3406 optimize_bit_field_compare (location_t loc, enum tree_code code,
3407 tree compare_type, tree lhs, tree rhs)
3409 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3410 tree type = TREE_TYPE (lhs);
3411 tree signed_type, unsigned_type;
3412 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3413 enum machine_mode lmode, rmode, nmode;
3414 int lunsignedp, runsignedp;
3415 int lvolatilep = 0, rvolatilep = 0;
3416 tree linner, rinner = NULL_TREE;
3417 tree mask;
3418 tree offset;
3420 /* In the strict volatile bitfields case, doing code changes here may prevent
3421 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3422 if (flag_strict_volatile_bitfields > 0)
3423 return 0;
3425 /* Get all the information about the extractions being done. If the bit size
3426 is the same as the size of the underlying object, we aren't doing an
3427 extraction at all and so can do nothing. We also don't want to
3428 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3429 then will no longer be able to replace it. */
3430 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3431 &lunsignedp, &lvolatilep, false);
3432 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3433 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3434 return 0;
3436 if (!const_p)
3438 /* If this is not a constant, we can only do something if bit positions,
3439 sizes, and signedness are the same. */
3440 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3441 &runsignedp, &rvolatilep, false);
3443 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3444 || lunsignedp != runsignedp || offset != 0
3445 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3446 return 0;
3449 /* See if we can find a mode to refer to this field. We should be able to,
3450 but fail if we can't. */
3451 if (lvolatilep
3452 && GET_MODE_BITSIZE (lmode) > 0
3453 && flag_strict_volatile_bitfields > 0)
3454 nmode = lmode;
3455 else
3456 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3457 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3458 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3459 TYPE_ALIGN (TREE_TYPE (rinner))),
3460 word_mode, lvolatilep || rvolatilep);
3461 if (nmode == VOIDmode)
3462 return 0;
3464 /* Set signed and unsigned types of the precision of this mode for the
3465 shifts below. */
3466 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3467 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3469 /* Compute the bit position and size for the new reference and our offset
3470 within it. If the new reference is the same size as the original, we
3471 won't optimize anything, so return zero. */
3472 nbitsize = GET_MODE_BITSIZE (nmode);
3473 nbitpos = lbitpos & ~ (nbitsize - 1);
3474 lbitpos -= nbitpos;
3475 if (nbitsize == lbitsize)
3476 return 0;
3478 if (BYTES_BIG_ENDIAN)
3479 lbitpos = nbitsize - lbitsize - lbitpos;
3481 /* Make the mask to be used against the extracted field. */
3482 mask = build_int_cst_type (unsigned_type, -1);
3483 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3484 mask = const_binop (RSHIFT_EXPR, mask,
3485 size_int (nbitsize - lbitsize - lbitpos));
3487 if (! const_p)
3488 /* If not comparing with constant, just rework the comparison
3489 and return. */
3490 return fold_build2_loc (loc, code, compare_type,
3491 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3492 make_bit_field_ref (loc, linner,
3493 unsigned_type,
3494 nbitsize, nbitpos,
3495 1),
3496 mask),
3497 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3498 make_bit_field_ref (loc, rinner,
3499 unsigned_type,
3500 nbitsize, nbitpos,
3501 1),
3502 mask));
3504 /* Otherwise, we are handling the constant case. See if the constant is too
3505 big for the field. Warn and return a tree for 0 (false) if so. We do
3506 this not only for its own sake, but to avoid having to test for this
3507 error case below. If we didn't, we might generate wrong code.
3509 For unsigned fields, the constant shifted right by the field length should
3510 be all zero. For signed fields, the high-order bits should agree with
3511 the sign bit. */
3513 if (lunsignedp)
3515 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3516 fold_convert_loc (loc,
3517 unsigned_type, rhs),
3518 size_int (lbitsize))))
3520 warning (0, "comparison is always %d due to width of bit-field",
3521 code == NE_EXPR);
3522 return constant_boolean_node (code == NE_EXPR, compare_type);
3525 else
3527 tree tem = const_binop (RSHIFT_EXPR,
3528 fold_convert_loc (loc, signed_type, rhs),
3529 size_int (lbitsize - 1));
3530 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3532 warning (0, "comparison is always %d due to width of bit-field",
3533 code == NE_EXPR);
3534 return constant_boolean_node (code == NE_EXPR, compare_type);
3538 /* Single-bit compares should always be against zero. */
3539 if (lbitsize == 1 && ! integer_zerop (rhs))
3541 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3542 rhs = build_int_cst (type, 0);
3545 /* Make a new bitfield reference, shift the constant over the
3546 appropriate number of bits and mask it with the computed mask
3547 (in case this was a signed field). If we changed it, make a new one. */
3548 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3549 if (lvolatilep)
3551 TREE_SIDE_EFFECTS (lhs) = 1;
3552 TREE_THIS_VOLATILE (lhs) = 1;
3555 rhs = const_binop (BIT_AND_EXPR,
3556 const_binop (LSHIFT_EXPR,
3557 fold_convert_loc (loc, unsigned_type, rhs),
3558 size_int (lbitpos)),
3559 mask);
3561 lhs = build2_loc (loc, code, compare_type,
3562 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3563 return lhs;
3566 /* Subroutine for fold_truth_andor_1: decode a field reference.
3568 If EXP is a comparison reference, we return the innermost reference.
3570 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3571 set to the starting bit number.
3573 If the innermost field can be completely contained in a mode-sized
3574 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3576 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3577 otherwise it is not changed.
3579 *PUNSIGNEDP is set to the signedness of the field.
3581 *PMASK is set to the mask used. This is either contained in a
3582 BIT_AND_EXPR or derived from the width of the field.
3584 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3586 Return 0 if this is not a component reference or is one that we can't
3587 do anything with. */
3589 static tree
3590 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3591 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3592 int *punsignedp, int *pvolatilep,
3593 tree *pmask, tree *pand_mask)
3595 tree outer_type = 0;
3596 tree and_mask = 0;
3597 tree mask, inner, offset;
3598 tree unsigned_type;
3599 unsigned int precision;
3601 /* All the optimizations using this function assume integer fields.
3602 There are problems with FP fields since the type_for_size call
3603 below can fail for, e.g., XFmode. */
3604 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3605 return 0;
3607 /* We are interested in the bare arrangement of bits, so strip everything
3608 that doesn't affect the machine mode. However, record the type of the
3609 outermost expression if it may matter below. */
3610 if (CONVERT_EXPR_P (exp)
3611 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3612 outer_type = TREE_TYPE (exp);
3613 STRIP_NOPS (exp);
3615 if (TREE_CODE (exp) == BIT_AND_EXPR)
3617 and_mask = TREE_OPERAND (exp, 1);
3618 exp = TREE_OPERAND (exp, 0);
3619 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3620 if (TREE_CODE (and_mask) != INTEGER_CST)
3621 return 0;
3624 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3625 punsignedp, pvolatilep, false);
3626 if ((inner == exp && and_mask == 0)
3627 || *pbitsize < 0 || offset != 0
3628 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3629 return 0;
3631 /* If the number of bits in the reference is the same as the bitsize of
3632 the outer type, then the outer type gives the signedness. Otherwise
3633 (in case of a small bitfield) the signedness is unchanged. */
3634 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3635 *punsignedp = TYPE_UNSIGNED (outer_type);
3637 /* Compute the mask to access the bitfield. */
3638 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3639 precision = TYPE_PRECISION (unsigned_type);
3641 mask = build_int_cst_type (unsigned_type, -1);
3643 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3644 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3646 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3647 if (and_mask != 0)
3648 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3649 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3651 *pmask = mask;
3652 *pand_mask = and_mask;
3653 return inner;
3656 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3657 bit positions. */
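/* For example, in a 32-bit type the mask 0x000000ff satisfies this
   predicate for SIZE == 8, whereas 0x000000fe and 0x000001ff do not.  */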
3659 static int
3660 all_ones_mask_p (const_tree mask, int size)
3662 tree type = TREE_TYPE (mask);
3663 unsigned int precision = TYPE_PRECISION (type);
3664 tree tmask;
3666 tmask = build_int_cst_type (signed_type_for (type), -1);
3668 return
3669 tree_int_cst_equal (mask,
3670 const_binop (RSHIFT_EXPR,
3671 const_binop (LSHIFT_EXPR, tmask,
3672 size_int (precision - size)),
3673 size_int (precision - size)));
3676 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3677 represents the sign bit of EXP's type. If EXP represents a sign
3678 or zero extension, also test VAL against the unextended type.
3679 The return value is the (sub)expression whose sign bit is VAL,
3680 or NULL_TREE otherwise. */
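/* For example, for a 32-bit integral type the sign bit is 0x80000000.
   If EXP is a NOP_EXPR widening a 16-bit value, VAL == 0x8000 also
   qualifies and the inner 16-bit operand is returned.  */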
3682 static tree
3683 sign_bit_p (tree exp, const_tree val)
3685 unsigned HOST_WIDE_INT mask_lo, lo;
3686 HOST_WIDE_INT mask_hi, hi;
3687 int width;
3688 tree t;
3690 /* Tree EXP must have an integral type. */
3691 t = TREE_TYPE (exp);
3692 if (! INTEGRAL_TYPE_P (t))
3693 return NULL_TREE;
3695 /* Tree VAL must be an integer constant. */
3696 if (TREE_CODE (val) != INTEGER_CST
3697 || TREE_OVERFLOW (val))
3698 return NULL_TREE;
3700 width = TYPE_PRECISION (t);
3701 if (width > HOST_BITS_PER_WIDE_INT)
3703 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3704 lo = 0;
3706 mask_hi = ((unsigned HOST_WIDE_INT) -1
3707 >> (HOST_BITS_PER_DOUBLE_INT - width));
3708 mask_lo = -1;
3710 else
3712 hi = 0;
3713 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3715 mask_hi = 0;
3716 mask_lo = ((unsigned HOST_WIDE_INT) -1
3717 >> (HOST_BITS_PER_WIDE_INT - width));
3720 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3721 treat VAL as if it were unsigned. */
3722 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3723 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3724 return exp;
3726 /* Handle extension from a narrower type. */
3727 if (TREE_CODE (exp) == NOP_EXPR
3728 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3729 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3731 return NULL_TREE;
3734 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3735 to be evaluated unconditionally. */
3737 static int
3738 simple_operand_p (const_tree exp)
3740 /* Strip any conversions that don't change the machine mode. */
3741 STRIP_NOPS (exp);
3743 return (CONSTANT_CLASS_P (exp)
3744 || TREE_CODE (exp) == SSA_NAME
3745 || (DECL_P (exp)
3746 && ! TREE_ADDRESSABLE (exp)
3747 && ! TREE_THIS_VOLATILE (exp)
3748 && ! DECL_NONLOCAL (exp)
3749 /* Don't regard global variables as simple. They may be
3750 allocated in ways unknown to the compiler (shared memory,
3751 #pragma weak, etc). */
3752 && ! TREE_PUBLIC (exp)
3753 && ! DECL_EXTERNAL (exp)
3754 /* Loading a static variable is unduly expensive, but global
3755 registers aren't expensive. */
3756 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3759 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3760 to be evaluated unconditionally.
3761 In addition to simple_operand_p, we assume that comparisons, conversions,
3762 and logic-not operations are simple, if their operands are simple, too. */
3764 static bool
3765 simple_operand_p_2 (tree exp)
3767 enum tree_code code;
3769 if (TREE_SIDE_EFFECTS (exp)
3770 || tree_could_trap_p (exp))
3771 return false;
3773 while (CONVERT_EXPR_P (exp))
3774 exp = TREE_OPERAND (exp, 0);
3776 code = TREE_CODE (exp);
3778 if (TREE_CODE_CLASS (code) == tcc_comparison)
3779 return (simple_operand_p (TREE_OPERAND (exp, 0))
3780 && simple_operand_p (TREE_OPERAND (exp, 1)));
3782 if (code == TRUTH_NOT_EXPR)
3783 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3785 return simple_operand_p (exp);
3789 /* The following functions are subroutines to fold_range_test and allow it to
3790 try to change a logical combination of comparisons into a range test.
3792 For example, both
3793 X == 2 || X == 3 || X == 4 || X == 5
3794 and
3795 X >= 2 && X <= 5
3796 are converted to
3797 (unsigned) (X - 2) <= 3
3799 We describe each set of comparisons as being either inside or outside
3800 a range, using a variable named like IN_P, and then describe the
3801 range with a lower and upper bound. If one of the bounds is omitted,
3802 it represents either the highest or lowest value of the type.
3804 In the comments below, we represent a range by two numbers in brackets
3805 preceded by a "+" to designate being inside that range, or a "-" to
3806 designate being outside that range, so the condition can be inverted by
3807 flipping the prefix. An omitted bound is represented by a "-". For
3808 example, "- [-, 10]" means being outside the range starting at the lowest
3809 possible value and ending at 10, in other words, being greater than 10.
3810 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3811 always false.
3813 We set up things so that the missing bounds are handled in a consistent
3814 manner so neither a missing bound nor "true" and "false" need to be
3815 handled using a special case. */
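/* The first example above, written out in plain C as a sketch
   (assuming 32-bit int and the usual unsigned wrap-around):

     static int
     in_2_to_5 (int x)
     {
       // X == 2 || X == 3 || X == 4 || X == 5, i.e. + [2, 5], becomes
       return (unsigned) (x - 2) <= 3u;
     }

   Subtracting the low bound slides the range down to start at zero, and
   the unsigned compare rejects values that wrapped below it.  */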
3817 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3818 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3819 and UPPER1_P are nonzero if the respective argument is an upper bound
3820 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3821 must be specified for a comparison. ARG1 will be converted to ARG0's
3822 type if both are specified. */
3824 static tree
3825 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3826 tree arg1, int upper1_p)
3828 tree tem;
3829 int result;
3830 int sgn0, sgn1;
3832 /* If neither arg represents infinity, do the normal operation.
3833 Else, if not a comparison, return infinity. Else handle the special
3834 comparison rules. Note that most of the cases below won't occur, but
3835 are handled for consistency. */
3837 if (arg0 != 0 && arg1 != 0)
3839 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3840 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3841 STRIP_NOPS (tem);
3842 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3845 if (TREE_CODE_CLASS (code) != tcc_comparison)
3846 return 0;
3848 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3849 for neither. In real maths, we cannot assume open-ended ranges are
3850 the same. But, this is computer arithmetic, where numbers are finite.
3851 We can therefore replace any missing bound with a value Z that is
3852 greater than any representable number. This permits us to treat
3853 unbounded ranges as equal. */
3854 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3855 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3856 switch (code)
3858 case EQ_EXPR:
3859 result = sgn0 == sgn1;
3860 break;
3861 case NE_EXPR:
3862 result = sgn0 != sgn1;
3863 break;
3864 case LT_EXPR:
3865 result = sgn0 < sgn1;
3866 break;
3867 case LE_EXPR:
3868 result = sgn0 <= sgn1;
3869 break;
3870 case GT_EXPR:
3871 result = sgn0 > sgn1;
3872 break;
3873 case GE_EXPR:
3874 result = sgn0 >= sgn1;
3875 break;
3876 default:
3877 gcc_unreachable ();
3880 return constant_boolean_node (result, type);
3883 /* Helper routine for make_range. Perform one step for it; return the
3884 new expression if the loop should continue or NULL_TREE if it should
3885 stop. */
3887 tree
3888 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3889 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3890 bool *strict_overflow_p)
3892 tree arg0_type = TREE_TYPE (arg0);
3893 tree n_low, n_high, low = *p_low, high = *p_high;
3894 int in_p = *p_in_p, n_in_p;
3896 switch (code)
3898 case TRUTH_NOT_EXPR:
3899 /* We can only do something if the range is testing for zero. */
3900 if (low == NULL_TREE || high == NULL_TREE
3901 || ! integer_zerop (low) || ! integer_zerop (high))
3902 return NULL_TREE;
3903 *p_in_p = ! in_p;
3904 return arg0;
3906 case EQ_EXPR: case NE_EXPR:
3907 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3908 /* We can only do something if the range is testing for zero
3909 and if the second operand is an integer constant. Note that
3910 saying something is "in" the range we make is done by
3911 complementing IN_P, since IN_P is set up for the initial case of
3912 being not equal to zero; "out" is leaving it alone. */
3913 if (low == NULL_TREE || high == NULL_TREE
3914 || ! integer_zerop (low) || ! integer_zerop (high)
3915 || TREE_CODE (arg1) != INTEGER_CST)
3916 return NULL_TREE;
3918 switch (code)
3920 case NE_EXPR: /* - [c, c] */
3921 low = high = arg1;
3922 break;
3923 case EQ_EXPR: /* + [c, c] */
3924 in_p = ! in_p, low = high = arg1;
3925 break;
3926 case GT_EXPR: /* - [-, c] */
3927 low = 0, high = arg1;
3928 break;
3929 case GE_EXPR: /* + [c, -] */
3930 in_p = ! in_p, low = arg1, high = 0;
3931 break;
3932 case LT_EXPR: /* - [c, -] */
3933 low = arg1, high = 0;
3934 break;
3935 case LE_EXPR: /* + [-, c] */
3936 in_p = ! in_p, low = 0, high = arg1;
3937 break;
3938 default:
3939 gcc_unreachable ();
3942 /* If this is an unsigned comparison, we also know that EXP is
3943 greater than or equal to zero. We base the range tests we make
3944 on that fact, so we record it here so we can parse existing
3945 range tests. We test arg0_type since often the return type
3946 of, e.g. EQ_EXPR, is boolean. */
3947 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3949 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3950 in_p, low, high, 1,
3951 build_int_cst (arg0_type, 0),
3952 NULL_TREE))
3953 return NULL_TREE;
3955 in_p = n_in_p, low = n_low, high = n_high;
3957 /* If the high bound is missing, but we have a nonzero low
3958 bound, reverse the range so it goes from zero to the low bound
3959 minus 1. */
3960 if (high == 0 && low && ! integer_zerop (low))
3962 in_p = ! in_p;
3963 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3964 integer_one_node, 0);
3965 low = build_int_cst (arg0_type, 0);
3969 *p_low = low;
3970 *p_high = high;
3971 *p_in_p = in_p;
3972 return arg0;
3974 case NEGATE_EXPR:
3975 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3976 low and high are non-NULL, then normalize will DTRT. */
3977 if (!TYPE_UNSIGNED (arg0_type)
3978 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3980 if (low == NULL_TREE)
3981 low = TYPE_MIN_VALUE (arg0_type);
3982 if (high == NULL_TREE)
3983 high = TYPE_MAX_VALUE (arg0_type);
3986 /* (-x) IN [a,b] -> x in [-b, -a] */
3987 n_low = range_binop (MINUS_EXPR, exp_type,
3988 build_int_cst (exp_type, 0),
3989 0, high, 1);
3990 n_high = range_binop (MINUS_EXPR, exp_type,
3991 build_int_cst (exp_type, 0),
3992 0, low, 0);
3993 if (n_high != 0 && TREE_OVERFLOW (n_high))
3994 return NULL_TREE;
3995 goto normalize;
3997 case BIT_NOT_EXPR:
3998 /* ~ X -> -X - 1 */
3999 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4000 build_int_cst (exp_type, 1));
4002 case PLUS_EXPR:
4003 case MINUS_EXPR:
4004 if (TREE_CODE (arg1) != INTEGER_CST)
4005 return NULL_TREE;
4007 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4008 move a constant to the other side. */
4009 if (!TYPE_UNSIGNED (arg0_type)
4010 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4011 return NULL_TREE;
4013 /* If EXP is signed, any overflow in the computation is undefined,
4014 so we don't worry about it so long as our computations on
4015 the bounds don't overflow. For unsigned, overflow is defined
4016 and this is exactly the right thing. */
4017 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4018 arg0_type, low, 0, arg1, 0);
4019 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4020 arg0_type, high, 1, arg1, 0);
4021 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4022 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4023 return NULL_TREE;
4025 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4026 *strict_overflow_p = true;
4028 normalize:
4029 /* Check for an unsigned range which has wrapped around the maximum
4030 value thus making n_high < n_low, and normalize it. */
4031 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4033 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4034 integer_one_node, 0);
4035 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4036 integer_one_node, 0);
4038 /* If the range is of the form +/- [ x+1, x ], we won't
4039 be able to normalize it. But then, it represents the
4040 whole range or the empty set, so make it
4041 +/- [ -, - ]. */
4042 if (tree_int_cst_equal (n_low, low)
4043 && tree_int_cst_equal (n_high, high))
4044 low = high = 0;
4045 else
4046 in_p = ! in_p;
4048 else
4049 low = n_low, high = n_high;
4051 *p_low = low;
4052 *p_high = high;
4053 *p_in_p = in_p;
4054 return arg0;
4056 CASE_CONVERT:
4057 case NON_LVALUE_EXPR:
4058 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4059 return NULL_TREE;
4061 if (! INTEGRAL_TYPE_P (arg0_type)
4062 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4063 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4064 return NULL_TREE;
4066 n_low = low, n_high = high;
4068 if (n_low != 0)
4069 n_low = fold_convert_loc (loc, arg0_type, n_low);
4071 if (n_high != 0)
4072 n_high = fold_convert_loc (loc, arg0_type, n_high);
4074 /* If we're converting arg0 from an unsigned type to exp's
4075 signed type, we will be doing the comparison as unsigned.
4076 The tests above have already verified that LOW and HIGH
4077 are both positive.
4079 So we have to ensure that we will handle large unsigned
4080 values the same way that the current signed bounds treat
4081 negative values. */
4083 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4085 tree high_positive;
4086 tree equiv_type;
4087 /* For fixed-point modes, we need to pass the saturating flag
4088 as the 2nd parameter. */
4089 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4090 equiv_type
4091 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4092 TYPE_SATURATING (arg0_type));
4093 else
4094 equiv_type
4095 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4097 /* A range without an upper bound is, naturally, unbounded.
4098 Since convert would have cropped a very large value, use
4099 the max value for the destination type. */
4100 high_positive
4101 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4102 : TYPE_MAX_VALUE (arg0_type);
4104 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4105 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4106 fold_convert_loc (loc, arg0_type,
4107 high_positive),
4108 build_int_cst (arg0_type, 1));
4110 /* If the low bound is specified, "and" the range with the
4111 range for which the original unsigned value will be
4112 positive. */
4113 if (low != 0)
4115 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4116 1, fold_convert_loc (loc, arg0_type,
4117 integer_zero_node),
4118 high_positive))
4119 return NULL_TREE;
4121 in_p = (n_in_p == in_p);
4123 else
4125 /* Otherwise, "or" the range with the range of the input
4126 that will be interpreted as negative. */
4127 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4128 1, fold_convert_loc (loc, arg0_type,
4129 integer_zero_node),
4130 high_positive))
4131 return NULL_TREE;
4133 in_p = (in_p != n_in_p);
4137 *p_low = n_low;
4138 *p_high = n_high;
4139 *p_in_p = in_p;
4140 return arg0;
4142 default:
4143 return NULL_TREE;
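/* The arithmetic steps above are interval algebra: (-X) in [a, b] iff
   X in [-b, -a], and (X + C) in [a, b] iff X in [a - C, b - C], with
   ~X first rewritten as -X - 1.  A sketch of one step, ignoring the
   overflow checks the real code performs:

     static int
     plus_step_agrees (int x)
     {
       int direct  = x + 10 >= 12 && x + 10 <= 17;  // (x + 10) in + [12, 17]
       int stepped = x >= 2 && x <= 7;              // x in + [2, 7]
       return direct == stepped;                    // 1 whenever no overflow
     }
   */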
4147 /* Given EXP, a logical expression, set the range it is testing into
4148 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4149 actually being tested. *PLOW and *PHIGH will be made of the same
4150 type as the returned expression. If EXP is not a comparison, we
4151 will most likely not be returning a useful value and range. Set
4152 *STRICT_OVERFLOW_P to true if the return value is only valid
4153 because signed overflow is undefined; otherwise, do not change
4154 *STRICT_OVERFLOW_P. */
4156 tree
4157 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4158 bool *strict_overflow_p)
4160 enum tree_code code;
4161 tree arg0, arg1 = NULL_TREE;
4162 tree exp_type, nexp;
4163 int in_p;
4164 tree low, high;
4165 location_t loc = EXPR_LOCATION (exp);
4167 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4168 and see if we can refine the range. Some of the cases below may not
4169 happen, but it doesn't seem worth worrying about this. We "continue"
4170 the outer loop when we've changed something; otherwise we "break"
4171 the switch, which will "break" the while. */
4173 in_p = 0;
4174 low = high = build_int_cst (TREE_TYPE (exp), 0);
4176 while (1)
4178 code = TREE_CODE (exp);
4179 exp_type = TREE_TYPE (exp);
4180 arg0 = NULL_TREE;
4182 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4184 if (TREE_OPERAND_LENGTH (exp) > 0)
4185 arg0 = TREE_OPERAND (exp, 0);
4186 if (TREE_CODE_CLASS (code) == tcc_binary
4187 || TREE_CODE_CLASS (code) == tcc_comparison
4188 || (TREE_CODE_CLASS (code) == tcc_expression
4189 && TREE_OPERAND_LENGTH (exp) > 1))
4190 arg1 = TREE_OPERAND (exp, 1);
4192 if (arg0 == NULL_TREE)
4193 break;
4195 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4196 &high, &in_p, strict_overflow_p);
4197 if (nexp == NULL_TREE)
4198 break;
4199 exp = nexp;
4202 /* If EXP is a constant, we can evaluate whether this is true or false. */
4203 if (TREE_CODE (exp) == INTEGER_CST)
4205 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4206 exp, 0, low, 0))
4207 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4208 exp, 1, high, 1)));
4209 low = high = 0;
4210 exp = 0;
4213 *pin_p = in_p, *plow = low, *phigh = high;
4214 return exp;
4217 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4218 type, TYPE, return an expression to test if EXP is in (or out of, depending
4219 on IN_P) the range. Return 0 if the test couldn't be created. */
4221 tree
4222 build_range_check (location_t loc, tree type, tree exp, int in_p,
4223 tree low, tree high)
4225 tree etype = TREE_TYPE (exp), value;
4227 #ifdef HAVE_canonicalize_funcptr_for_compare
4228 /* Disable this optimization for function pointer expressions
4229 on targets that require function pointer canonicalization. */
4230 if (HAVE_canonicalize_funcptr_for_compare
4231 && TREE_CODE (etype) == POINTER_TYPE
4232 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4233 return NULL_TREE;
4234 #endif
4236 if (! in_p)
4238 value = build_range_check (loc, type, exp, 1, low, high);
4239 if (value != 0)
4240 return invert_truthvalue_loc (loc, value);
4242 return 0;
4245 if (low == 0 && high == 0)
4246 return build_int_cst (type, 1);
4248 if (low == 0)
4249 return fold_build2_loc (loc, LE_EXPR, type, exp,
4250 fold_convert_loc (loc, etype, high));
4252 if (high == 0)
4253 return fold_build2_loc (loc, GE_EXPR, type, exp,
4254 fold_convert_loc (loc, etype, low));
4256 if (operand_equal_p (low, high, 0))
4257 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, low));
4260 if (integer_zerop (low))
4262 if (! TYPE_UNSIGNED (etype))
4264 etype = unsigned_type_for (etype);
4265 high = fold_convert_loc (loc, etype, high);
4266 exp = fold_convert_loc (loc, etype, exp);
4268 return build_range_check (loc, type, exp, 1, 0, high);
4271 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4272 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4274 unsigned HOST_WIDE_INT lo;
4275 HOST_WIDE_INT hi;
4276 int prec;
4278 prec = TYPE_PRECISION (etype);
4279 if (prec <= HOST_BITS_PER_WIDE_INT)
4281 hi = 0;
4282 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4284 else
4286 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4287 lo = (unsigned HOST_WIDE_INT) -1;
4290 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4292 if (TYPE_UNSIGNED (etype))
4294 tree signed_etype = signed_type_for (etype);
4295 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4296 etype
4297 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4298 else
4299 etype = signed_etype;
4300 exp = fold_convert_loc (loc, etype, exp);
4302 return fold_build2_loc (loc, GT_EXPR, type, exp,
4303 build_int_cst (etype, 0));
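/* That special case at the source level, as a sketch (assumes an 8-bit
   unsigned char and two's complement):

     static int
     in_1_to_127 (unsigned char c)
     {
       // c >= 1 && c <= 127  folds to a single signed comparison:
       return (signed char) c > 0;
     }

   Zero and 128..255 become non-positive signed char values, so the one
   test covers both bounds.  */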
4307 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4308 This requires wrap-around arithmetic for the type of the expression.
4309 First make sure that arithmetic in this type is valid, then make sure
4310 that it wraps around. */
4311 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4312 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4313 TYPE_UNSIGNED (etype));
4315 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4317 tree utype, minv, maxv;
4319 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4320 for the type in question, as we rely on this here. */
4321 utype = unsigned_type_for (etype);
4322 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4323 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4324 integer_one_node, 1);
4325 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4327 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4328 minv, 1, maxv, 1)))
4329 etype = utype;
4330 else
4331 return 0;
4334 high = fold_convert_loc (loc, etype, high);
4335 low = fold_convert_loc (loc, etype, low);
4336 exp = fold_convert_loc (loc, etype, exp);
4338 value = const_binop (MINUS_EXPR, high, low);
4341 if (POINTER_TYPE_P (etype))
4343 if (value != 0 && !TREE_OVERFLOW (value))
4345 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4346 return build_range_check (loc, type,
4347 fold_build_pointer_plus_loc (loc, exp, low),
4348 1, build_int_cst (etype, 0), value);
4350 return 0;
4353 if (value != 0 && !TREE_OVERFLOW (value))
4354 return build_range_check (loc, type,
4355 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4356 1, build_int_cst (etype, 0), value);
4358 return 0;
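/* The general shape of the emitted check, sketched in plain C for an
   unsigned wrap-around ETYPE with LOW <= HIGH:

     static int
     in_range (unsigned int x, unsigned int low, unsigned int high)
     {
       // x in [low, high]  <=>  x - low in [0, high - low]
       return x - low <= high - low;
     }

   which is why the code above insists on a type whose arithmetic is
   known to wrap, converting to the unsigned variant if need be.  */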
4361 /* Return the predecessor of VAL in its type, handling the infinite case. */
4363 static tree
4364 range_predecessor (tree val)
4366 tree type = TREE_TYPE (val);
4368 if (INTEGRAL_TYPE_P (type)
4369 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4370 return 0;
4371 else
4372 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4375 /* Return the successor of VAL in its type, handling the infinite case. */
4377 static tree
4378 range_successor (tree val)
4380 tree type = TREE_TYPE (val);
4382 if (INTEGRAL_TYPE_P (type)
4383 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4384 return 0;
4385 else
4386 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4389 /* Given two ranges, see if we can merge them into one. Return 1 if we
4390 can, 0 if we can't. Set the output range into the specified parameters. */
4392 bool
4393 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4394 tree high0, int in1_p, tree low1, tree high1)
4396 int no_overlap;
4397 int subset;
4398 int temp;
4399 tree tem;
4400 int in_p;
4401 tree low, high;
4402 int lowequal = ((low0 == 0 && low1 == 0)
4403 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4404 low0, 0, low1, 0)));
4405 int highequal = ((high0 == 0 && high1 == 0)
4406 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4407 high0, 1, high1, 1)));
4409 /* Make range 0 be the range that starts first, or ends last if they
4410 start at the same value. Swap them if that isn't the case. */
4411 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4412 low0, 0, low1, 0))
4413 || (lowequal
4414 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4415 high1, 1, high0, 1))))
4417 temp = in0_p, in0_p = in1_p, in1_p = temp;
4418 tem = low0, low0 = low1, low1 = tem;
4419 tem = high0, high0 = high1, high1 = tem;
4422 /* Now flag two cases, whether the ranges are disjoint or whether the
4423 second range is totally subsumed in the first. Note that the tests
4424 below are simplified by the ones above. */
4425 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4426 high0, 1, low1, 0));
4427 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4428 high1, 1, high0, 1));
4430 /* We now have four cases, depending on whether we are including or
4431 excluding the two ranges. */
4432 if (in0_p && in1_p)
4434 /* If they don't overlap, the result is false. If the second range
4435 is a subset it is the result. Otherwise, the range is from the start
4436 of the second to the end of the first. */
4437 if (no_overlap)
4438 in_p = 0, low = high = 0;
4439 else if (subset)
4440 in_p = 1, low = low1, high = high1;
4441 else
4442 in_p = 1, low = low1, high = high0;
4445 else if (in0_p && ! in1_p)
4447 /* If they don't overlap, the result is the first range. If they are
4448 equal, the result is false. If the second range is a subset of the
4449 first, and the ranges begin at the same place, we go from just after
4450 the end of the second range to the end of the first. If the second
4451 range is not a subset of the first, or if it is a subset and both
4452 ranges end at the same place, the range starts at the start of the
4453 first range and ends just before the second range.
4454 Otherwise, we can't describe this as a single range. */
4455 if (no_overlap)
4456 in_p = 1, low = low0, high = high0;
4457 else if (lowequal && highequal)
4458 in_p = 0, low = high = 0;
4459 else if (subset && lowequal)
4461 low = range_successor (high1);
4462 high = high0;
4463 in_p = 1;
4464 if (low == 0)
4466 /* We are in the weird situation where high0 > high1 but
4467 high1 has no successor. Punt. */
4468 return 0;
4471 else if (! subset || highequal)
4473 low = low0;
4474 high = range_predecessor (low1);
4475 in_p = 1;
4476 if (high == 0)
4478 /* low0 < low1 but low1 has no predecessor. Punt. */
4479 return 0;
4482 else
4483 return 0;
4486 else if (! in0_p && in1_p)
4488 /* If they don't overlap, the result is the second range. If the second
4489 is a subset of the first, the result is false. Otherwise,
4490 the range starts just after the first range and ends at the
4491 end of the second. */
4492 if (no_overlap)
4493 in_p = 1, low = low1, high = high1;
4494 else if (subset || highequal)
4495 in_p = 0, low = high = 0;
4496 else
4498 low = range_successor (high0);
4499 high = high1;
4500 in_p = 1;
4501 if (low == 0)
4503 /* high1 > high0 but high0 has no successor. Punt. */
4504 return 0;
4509 else
4511 /* The case where we are excluding both ranges. Here the complex case
4512 is if they don't overlap. In that case, the only time we have a
4513 range is if they are adjacent. If the second is a subset of the
4514 first, the result is the first. Otherwise, the range to exclude
4515 starts at the beginning of the first range and ends at the end of the
4516 second. */
4517 if (no_overlap)
4519 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4520 range_successor (high0),
4521 1, low1, 0)))
4522 in_p = 0, low = low0, high = high1;
4523 else
4525 /* Canonicalize - [min, x] into - [-, x]. */
4526 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4527 switch (TREE_CODE (TREE_TYPE (low0)))
4529 case ENUMERAL_TYPE:
4530 if (TYPE_PRECISION (TREE_TYPE (low0))
4531 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4532 break;
4533 /* FALLTHROUGH */
4534 case INTEGER_TYPE:
4535 if (tree_int_cst_equal (low0,
4536 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4537 low0 = 0;
4538 break;
4539 case POINTER_TYPE:
4540 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4541 && integer_zerop (low0))
4542 low0 = 0;
4543 break;
4544 default:
4545 break;
4548 /* Canonicalize - [x, max] into - [x, -]. */
4549 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4550 switch (TREE_CODE (TREE_TYPE (high1)))
4552 case ENUMERAL_TYPE:
4553 if (TYPE_PRECISION (TREE_TYPE (high1))
4554 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4555 break;
4556 /* FALLTHROUGH */
4557 case INTEGER_TYPE:
4558 if (tree_int_cst_equal (high1,
4559 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4560 high1 = 0;
4561 break;
4562 case POINTER_TYPE:
4563 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4564 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4565 high1, 1,
4566 integer_one_node, 1)))
4567 high1 = 0;
4568 break;
4569 default:
4570 break;
4573 /* The ranges might also be adjacent between the maximum and
4574 minimum values of the given type. For
4575 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4576 return + [x + 1, y - 1]. */
4577 if (low0 == 0 && high1 == 0)
4579 low = range_successor (high0);
4580 high = range_predecessor (low1);
4581 if (low == 0 || high == 0)
4582 return 0;
4584 in_p = 1;
4586 else
4587 return 0;
4590 else if (subset)
4591 in_p = 0, low = low0, high = high0;
4592 else
4593 in_p = 0, low = low0, high = high1;
4596 *pin_p = in_p, *plow = low, *phigh = high;
4597 return 1;
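/* Two worked instances of the merge, as a sketch with inclusive bounds:

     static int
     merge_demo (int x)
     {
       // + [2, 8] with + [5, 12] merges to + [5, 8] (the intersection);
       // + [2, 8] with - [5, 12] merges to + [2, 4] (the difference).
       int both_in = (x >= 2 && x <= 8) && (x >= 5 && x <= 12);
       int in_out  = (x >= 2 && x <= 8) && !(x >= 5 && x <= 12);
       return both_in == (x >= 5 && x <= 8)
              && in_out == (x >= 2 && x <= 4);     // 1 for every x
     }
   */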
4601 /* Subroutine of fold, looking inside expressions of the form
4602 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4603 of the COND_EXPR. This function is also used to optimize
4604 A op B ? C : A by reversing the comparison first.
4606 Return a folded expression whose code is not a COND_EXPR
4607 anymore, or NULL_TREE if no folding opportunity is found. */
4609 static tree
4610 fold_cond_expr_with_comparison (location_t loc, tree type,
4611 tree arg0, tree arg1, tree arg2)
4613 enum tree_code comp_code = TREE_CODE (arg0);
4614 tree arg00 = TREE_OPERAND (arg0, 0);
4615 tree arg01 = TREE_OPERAND (arg0, 1);
4616 tree arg1_type = TREE_TYPE (arg1);
4617 tree tem;
4619 STRIP_NOPS (arg1);
4620 STRIP_NOPS (arg2);
4622 /* If we have A op 0 ? A : -A, consider applying the following
4623 transformations:
4625 A == 0? A : -A same as -A
4626 A != 0? A : -A same as A
4627 A >= 0? A : -A same as abs (A)
4628 A > 0? A : -A same as abs (A)
4629 A <= 0? A : -A same as -abs (A)
4630 A < 0? A : -A same as -abs (A)
4632 None of these transformations work for modes with signed
4633 zeros. If A is +/-0, the first two transformations will
4634 change the sign of the result (from +0 to -0, or vice
4635 versa). The last four will fix the sign of the result,
4636 even though the original expressions could be positive or
4637 negative, depending on the sign of A.
4639 Note that all these transformations are correct if A is
4640 NaN, since the two alternatives (A and -A) are also NaNs. */
4641 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4642 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4643 ? real_zerop (arg01)
4644 : integer_zerop (arg01))
4645 && ((TREE_CODE (arg2) == NEGATE_EXPR
4646 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4647 /* In the case that A is of the form X-Y, '-A' (arg2) may
4648 have already been folded to Y-X, check for that. */
4649 || (TREE_CODE (arg1) == MINUS_EXPR
4650 && TREE_CODE (arg2) == MINUS_EXPR
4651 && operand_equal_p (TREE_OPERAND (arg1, 0),
4652 TREE_OPERAND (arg2, 1), 0)
4653 && operand_equal_p (TREE_OPERAND (arg1, 1),
4654 TREE_OPERAND (arg2, 0), 0))))
4655 switch (comp_code)
4657 case EQ_EXPR:
4658 case UNEQ_EXPR:
4659 tem = fold_convert_loc (loc, arg1_type, arg1);
4660 return pedantic_non_lvalue_loc (loc,
4661 fold_convert_loc (loc, type,
4662 negate_expr (tem)));
4663 case NE_EXPR:
4664 case LTGT_EXPR:
4665 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4666 case UNGE_EXPR:
4667 case UNGT_EXPR:
4668 if (flag_trapping_math)
4669 break;
4670 /* Fall through. */
4671 case GE_EXPR:
4672 case GT_EXPR:
4673 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4674 arg1 = fold_convert_loc (loc, signed_type_for
4675 (TREE_TYPE (arg1)), arg1);
4676 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4677 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4678 case UNLE_EXPR:
4679 case UNLT_EXPR:
4680 if (flag_trapping_math)
4681 break;
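/* Fall through. */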
4682 case LE_EXPR:
4683 case LT_EXPR:
4684 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4685 arg1 = fold_convert_loc (loc, signed_type_for
4686 (TREE_TYPE (arg1)), arg1);
4687 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4688 return negate_expr (fold_convert_loc (loc, type, tem));
4689 default:
4690 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4691 break;
4694 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4695 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4696 both transformations are correct when A is NaN: A != 0
4697 is then true, and A == 0 is false. */
4699 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4700 && integer_zerop (arg01) && integer_zerop (arg2))
4702 if (comp_code == NE_EXPR)
4703 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4704 else if (comp_code == EQ_EXPR)
4705 return build_zero_cst (type);
4708 /* Try some transformations of A op B ? A : B.
4710 A == B? A : B same as B
4711 A != B? A : B same as A
4712 A >= B? A : B same as max (A, B)
4713 A > B? A : B same as max (B, A)
4714 A <= B? A : B same as min (A, B)
4715 A < B? A : B same as min (B, A)
4717 As above, these transformations don't work in the presence
4718 of signed zeros. For example, if A and B are zeros of
4719 opposite sign, the first two transformations will change
4720 the sign of the result. In the last four, the original
4721 expressions give different results for (A=+0, B=-0) and
4722 (A=-0, B=+0), but the transformed expressions do not.
4724 The first two transformations are correct if either A or B
4725 is a NaN. In the first transformation, the condition will
4726 be false, and B will indeed be chosen. In the case of the
4727 second transformation, the condition A != B will be true,
4728 and A will be chosen.
4730 The conversions to max() and min() are not correct if B is
4731 a number and A is not. The conditions in the original
4732 expressions will be false, so all four give B. The min()
4733 and max() versions would give a NaN instead. */
4734 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4735 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4736 /* Avoid these transformations if the COND_EXPR may be used
4737 as an lvalue in the C++ front-end. PR c++/19199. */
4738 && (in_gimple_form
4739 || VECTOR_TYPE_P (type)
4740 || (strcmp (lang_hooks.name, "GNU C++") != 0
4741 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4742 || ! maybe_lvalue_p (arg1)
4743 || ! maybe_lvalue_p (arg2)))
4745 tree comp_op0 = arg00;
4746 tree comp_op1 = arg01;
4747 tree comp_type = TREE_TYPE (comp_op0);
4749 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4750 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4752 comp_type = type;
4753 comp_op0 = arg1;
4754 comp_op1 = arg2;
4757 switch (comp_code)
4759 case EQ_EXPR:
4760 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4761 case NE_EXPR:
4762 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4763 case LE_EXPR:
4764 case LT_EXPR:
4765 case UNLE_EXPR:
4766 case UNLT_EXPR:
4767 /* In C++ a ?: expression can be an lvalue, so put the
4768 operand which will be used if they are equal first
4769 so that we can convert this back to the
4770 corresponding COND_EXPR. */
4771 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4773 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4774 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4775 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4776 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4777 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4778 comp_op1, comp_op0);
4779 return pedantic_non_lvalue_loc (loc,
4780 fold_convert_loc (loc, type, tem));
4782 break;
4783 case GE_EXPR:
4784 case GT_EXPR:
4785 case UNGE_EXPR:
4786 case UNGT_EXPR:
4787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4789 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4790 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4791 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4792 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4793 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4794 comp_op1, comp_op0);
4795 return pedantic_non_lvalue_loc (loc,
4796 fold_convert_loc (loc, type, tem));
4798 break;
4799 case UNEQ_EXPR:
4800 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4801 return pedantic_non_lvalue_loc (loc,
4802 fold_convert_loc (loc, type, arg2));
4803 break;
4804 case LTGT_EXPR:
4805 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4806 return pedantic_non_lvalue_loc (loc,
4807 fold_convert_loc (loc, type, arg1));
4808 break;
4809 default:
4810 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4811 break;
4815 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4816 we might still be able to simplify this. For example,
4817 if C1 is one less or one more than C2, this might have started
4818 out as a MIN or MAX and been transformed by this function.
4819 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4821 if (INTEGRAL_TYPE_P (type)
4822 && TREE_CODE (arg01) == INTEGER_CST
4823 && TREE_CODE (arg2) == INTEGER_CST)
4824 switch (comp_code)
4826 case EQ_EXPR:
4827 if (TREE_CODE (arg1) == INTEGER_CST)
4828 break;
4829 /* We can replace A with C1 in this case. */
4830 arg1 = fold_convert_loc (loc, type, arg01);
4831 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4833 case LT_EXPR:
4834 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4835 MIN_EXPR, to preserve the signedness of the comparison. */
4836 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4837 OEP_ONLY_CONST)
4838 && operand_equal_p (arg01,
4839 const_binop (PLUS_EXPR, arg2,
4840 build_int_cst (type, 1)),
4841 OEP_ONLY_CONST))
4843 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4844 fold_convert_loc (loc, TREE_TYPE (arg00),
4845 arg2));
4846 return pedantic_non_lvalue_loc (loc,
4847 fold_convert_loc (loc, type, tem));
4849 break;
4851 case LE_EXPR:
4852 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4853 as above. */
4854 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4855 OEP_ONLY_CONST)
4856 && operand_equal_p (arg01,
4857 const_binop (MINUS_EXPR, arg2,
4858 build_int_cst (type, 1)),
4859 OEP_ONLY_CONST))
4861 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4862 fold_convert_loc (loc, TREE_TYPE (arg00),
4863 arg2));
4864 return pedantic_non_lvalue_loc (loc,
4865 fold_convert_loc (loc, type, tem));
4867 break;
4869 case GT_EXPR:
4870 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4871 MAX_EXPR, to preserve the signedness of the comparison. */
4872 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4873 OEP_ONLY_CONST)
4874 && operand_equal_p (arg01,
4875 const_binop (MINUS_EXPR, arg2,
4876 build_int_cst (type, 1)),
4877 OEP_ONLY_CONST))
4879 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4880 fold_convert_loc (loc, TREE_TYPE (arg00),
4881 arg2));
4882 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4884 break;
4886 case GE_EXPR:
4887 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4888 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4889 OEP_ONLY_CONST)
4890 && operand_equal_p (arg01,
4891 const_binop (PLUS_EXPR, arg2,
4892 build_int_cst (type, 1)),
4893 OEP_ONLY_CONST))
4895 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4896 fold_convert_loc (loc, TREE_TYPE (arg00),
4897 arg2));
4898 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4900 break;
4901 case NE_EXPR:
4902 break;
4903 default:
4904 gcc_unreachable ();
4907 return NULL_TREE;
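/* The A op B ? A : B family above, sketched at the source level (valid
   as written only when NaNs and signed zeros need not be honored):

     static int
     max_demo (int a, int b)
     {
       return a >= b ? a : b;     // same as max (a, b)
     }

     static int
     min_demo (int a, int b)
     {
       return a < b ? a : b;      // same as min (b, a)
     }
   */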
4912 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4913 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4914 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4915 false) >= 2)
4916 #endif
4918 /* EXP is some logical combination of boolean tests. See if we can
4919 merge it into some range test. Return the new tree if so. */
4921 static tree
4922 fold_range_test (location_t loc, enum tree_code code, tree type,
4923 tree op0, tree op1)
4925 int or_op = (code == TRUTH_ORIF_EXPR
4926 || code == TRUTH_OR_EXPR);
4927 int in0_p, in1_p, in_p;
4928 tree low0, low1, low, high0, high1, high;
4929 bool strict_overflow_p = false;
4930 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4931 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4932 tree tem;
4933 const char * const warnmsg = G_("assuming signed overflow does not occur "
4934 "when simplifying range test");
4936 /* If this is an OR operation, invert both sides; we will invert
4937 again at the end. */
4938 if (or_op)
4939 in0_p = ! in0_p, in1_p = ! in1_p;
4941 /* If both expressions are the same, if we can merge the ranges, and we
4942 can build the range test, return it or it inverted. If one of the
4943 ranges is always true or always false, consider it to be the same
4944 expression as the other. */
4945 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4946 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4947 in1_p, low1, high1)
4948 && 0 != (tem = (build_range_check (loc, type,
4949 lhs != 0 ? lhs
4950 : rhs != 0 ? rhs : integer_zero_node,
4951 in_p, low, high))))
4953 if (strict_overflow_p)
4954 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4955 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4958 /* On machines where the branch cost is expensive, if this is a
4959 short-circuited branch and the underlying object on both sides
4960 is the same, make a non-short-circuit operation. */
4961 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4962 && lhs != 0 && rhs != 0
4963 && (code == TRUTH_ANDIF_EXPR
4964 || code == TRUTH_ORIF_EXPR)
4965 && operand_equal_p (lhs, rhs, 0))
4967 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4968 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4969 which cases we can't do this. */
4970 if (simple_operand_p (lhs))
4971 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4972 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4973 type, op0, op1);
4975 else if (!lang_hooks.decls.global_bindings_p ()
4976 && !CONTAINS_PLACEHOLDER_P (lhs))
4978 tree common = save_expr (lhs);
4980 if (0 != (lhs = build_range_check (loc, type, common,
4981 or_op ? ! in0_p : in0_p,
4982 low0, high0))
4983 && (0 != (rhs = build_range_check (loc, type, common,
4984 or_op ? ! in1_p : in1_p,
4985 low1, high1))))
4987 if (strict_overflow_p)
4988 fold_overflow_warning (warnmsg,
4989 WARN_STRICT_OVERFLOW_COMPARISON);
4990 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4991 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4992 type, lhs, rhs);
4997 return 0;
5000 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5001 bit value. Arrange things so the extra bits will be set to zero if and
5002 only if C is sign-extended to its full width. If MASK is nonzero,
5003 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5005 static tree
5006 unextend (tree c, int p, int unsignedp, tree mask)
5008 tree type = TREE_TYPE (c);
5009 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5010 tree temp;
5012 if (p == modesize || unsignedp)
5013 return c;
5015 /* We work by getting just the sign bit into the low-order bit, then
5016 into the high-order bit, then sign-extend. We then XOR that value
5017 with C. */
5018 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5019 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (signed_type_for (type), temp);
5030 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5031 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5032 if (mask != 0)
5033 temp = const_binop (BIT_AND_EXPR, temp,
5034 fold_convert (TREE_TYPE (c), mask));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type))
5037 temp = fold_convert (type, temp);
5039 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
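/* The classic shift pair the comment above relies on, as a standalone
   sketch (assumes 32-bit int and an arithmetic right shift on signed
   values):

     static int
     sign_extend_field (unsigned int c, int p)    // 1 <= p <= 31
     {
       // push the field's sign bit to the top, then shift back signed
       return (int) (c << (32 - p)) >> (32 - p);
     }

   unextend instead XORs the extension bits into C, so that a later
   equality test sees zero extra bits exactly when C was already
   sign-extended.  */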
5042 /* For an expression that has the form
5043 (A && B) || ~B
5044 or
5045 (A || B) && ~B,
5046 we can drop one of the inner expressions and simplify to
5047 A || ~B
5048 or
5049 A && ~B
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation; the left-hand side in the examples above, while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5058 static tree
5059 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5060 bool rhs_only)
5062 tree type = TREE_TYPE (cmpop);
5063 enum tree_code code = TREE_CODE (cmpop);
5064 enum tree_code truthop_code = TREE_CODE (op);
5065 tree lhs = TREE_OPERAND (op, 0);
5066 tree rhs = TREE_OPERAND (op, 1);
5067 tree orig_lhs = lhs, orig_rhs = rhs;
5068 enum tree_code rhs_code = TREE_CODE (rhs);
5069 enum tree_code lhs_code = TREE_CODE (lhs);
5070 enum tree_code inv_code;
5072 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5073 return NULL_TREE;
5075 if (TREE_CODE_CLASS (code) != tcc_comparison)
5076 return NULL_TREE;
5078 if (rhs_code == truthop_code)
5080 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5081 if (newrhs != NULL_TREE)
5083 rhs = newrhs;
5084 rhs_code = TREE_CODE (rhs);
5087 if (lhs_code == truthop_code && !rhs_only)
5089 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5090 if (newlhs != NULL_TREE)
5092 lhs = newlhs;
5093 lhs_code = TREE_CODE (lhs);
5097 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5098 if (inv_code == rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5101 return lhs;
5102 if (!rhs_only && inv_code == lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return rhs;
5106 if (rhs != orig_rhs || lhs != orig_lhs)
5107 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5108 lhs, rhs);
5109 return NULL_TREE;
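/* A concrete instance of the simplification, as a sketch:

     static int
     guard_demo (int a, int b)
     {
       // (a > 0 && b == 1) || b != 1  simplifies to  a > 0 || b != 1,
       // because whenever b != 1 is false, b == 1 already holds.
       return (a > 0 && b == 1) || b != 1;
     }
   */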
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with a mask.
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5125 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5126 function and the one above.
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5132 two operands.
5134 We return the simplified tree or 0 if no optimization is possible. */
5136 static tree
5137 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5138 tree lhs, tree rhs)
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5149 enum tree_code wanted_code;
5150 enum tree_code lcode, rcode;
5151 tree ll_arg, lr_arg, rl_arg, rr_arg;
5152 tree ll_inner, lr_inner, rl_inner, rr_inner;
5153 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5154 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5155 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5156 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5157 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5158 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5159 enum machine_mode lnmode, rnmode;
5160 tree ll_mask, lr_mask, rl_mask, rr_mask;
5161 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5162 tree l_const, r_const;
5163 tree lntype, rntype, result;
5164 HOST_WIDE_INT first_bit, end_bit;
5165 int volatilep;
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5171 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5172 return 0;
5174 lcode = TREE_CODE (lhs);
5175 rcode = TREE_CODE (rhs);
5177 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5179 lhs = build2 (NE_EXPR, truth_type, lhs,
5180 build_int_cst (TREE_TYPE (lhs), 0));
5181 lcode = NE_EXPR;
5184 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5186 rhs = build2 (NE_EXPR, truth_type, rhs,
5187 build_int_cst (TREE_TYPE (rhs), 0));
5188 rcode = NE_EXPR;
5191 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5193 return 0;
5195 ll_arg = TREE_OPERAND (lhs, 0);
5196 lr_arg = TREE_OPERAND (lhs, 1);
5197 rl_arg = TREE_OPERAND (rhs, 0);
5198 rr_arg = TREE_OPERAND (rhs, 1);
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg)
5202 && simple_operand_p (lr_arg))
5204 if (operand_equal_p (ll_arg, rl_arg, 0)
5205 && operand_equal_p (lr_arg, rr_arg, 0))
5207 result = combine_comparisons (loc, code, lcode, rcode,
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5212 else if (operand_equal_p (ll_arg, rr_arg, 0)
5213 && operand_equal_p (lr_arg, rl_arg, 0))
5215 result = combine_comparisons (loc, code, lcode,
5216 swap_tree_comparison (rcode),
5217 truth_type, ll_arg, lr_arg);
5218 if (result)
5219 return result;
5223 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5232 false) >= 2
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5234 && simple_operand_p (rl_arg)
5235 && simple_operand_p (rr_arg))
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code == TRUTH_OR_EXPR
5239 && lcode == NE_EXPR && integer_zerop (lr_arg)
5240 && rcode == NE_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, NE_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code == TRUTH_AND_EXPR
5250 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5251 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5252 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5254 return build2_loc (loc, EQ_EXPR, truth_type,
5255 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5256 ll_arg, rl_arg),
5257 build_int_cst (TREE_TYPE (ll_arg), 0));
5260 /* See if the comparisons can be merged. Then get all the parameters for
5261 each side. */
5263 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5264 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5265 return 0;
5267 volatilep = 0;
5268 ll_inner = decode_field_reference (loc, ll_arg,
5269 &ll_bitsize, &ll_bitpos, &ll_mode,
5270 &ll_unsignedp, &volatilep, &ll_mask,
5271 &ll_and_mask);
5272 lr_inner = decode_field_reference (loc, lr_arg,
5273 &lr_bitsize, &lr_bitpos, &lr_mode,
5274 &lr_unsignedp, &volatilep, &lr_mask,
5275 &lr_and_mask);
5276 rl_inner = decode_field_reference (loc, rl_arg,
5277 &rl_bitsize, &rl_bitpos, &rl_mode,
5278 &rl_unsignedp, &volatilep, &rl_mask,
5279 &rl_and_mask);
5280 rr_inner = decode_field_reference (loc, rr_arg,
5281 &rr_bitsize, &rr_bitpos, &rr_mode,
5282 &rr_unsignedp, &volatilep, &rr_mask,
5283 &rr_and_mask);
5285 /* The inner operation on the lhs of each comparison must be the
5286 same if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5288 the rhs's. */
5289 if (volatilep || ll_inner == 0 || rl_inner == 0
5290 || ! operand_equal_p (ll_inner, rl_inner, 0))
5291 return 0;
5293 if (TREE_CODE (lr_arg) == INTEGER_CST
5294 && TREE_CODE (rr_arg) == INTEGER_CST)
5295 l_const = lr_arg, r_const = rr_arg;
5296 else if (lr_inner == 0 || rr_inner == 0
5297 || ! operand_equal_p (lr_inner, rr_inner, 0))
5298 return 0;
5299 else
5300 l_const = r_const = 0;
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5306 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5307 if (lcode != wanted_code)
5309 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5313 thing below. */
5314 ll_unsignedp = 1;
5315 l_const = ll_mask;
5317 else
5318 return 0;
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode != wanted_code)
5324 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 rl_unsignedp = 1;
5327 r_const = rl_mask;
5329 else
5330 return 0;
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit = MIN (ll_bitpos, rl_bitpos);
5337 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5338 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5340 volatilep);
5341 if (lnmode == VOIDmode)
5342 return 0;
5344 lnbitsize = GET_MODE_BITSIZE (lnmode);
5345 lnbitpos = first_bit & ~ (lnbitsize - 1);
5346 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5347 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5349 if (BYTES_BIG_ENDIAN)
5351 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5352 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5355 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5356 size_int (xll_bitpos));
5357 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5358 size_int (xrl_bitpos));
5360 if (l_const)
5362 l_const = fold_convert_loc (loc, lntype, l_const);
5363 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5364 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5366 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 lntype, ll_mask))))
5369 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5371 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5374 if (r_const)
5376 r_const = fold_convert_loc (loc, lntype, r_const);
5377 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5378 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5380 fold_build1_loc (loc, BIT_NOT_EXPR,
5381 lntype, rl_mask))))
5383 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5385 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5389 /* If the right sides are not constant, do the same for them. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5392 if (l_const == 0)
5394 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5395 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5399 return 0;
5401 first_bit = MIN (lr_bitpos, rr_bitpos);
5402 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5403 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5405 volatilep);
5406 if (rnmode == VOIDmode)
5407 return 0;
5409 rnbitsize = GET_MODE_BITSIZE (rnmode);
5410 rnbitpos = first_bit & ~ (rnbitsize - 1);
5411 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5412 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5414 if (BYTES_BIG_ENDIAN)
5416 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5417 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5420 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, lr_mask),
5422 size_int (xlr_bitpos));
5423 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5424 rntype, rr_mask),
5425 size_int (xrr_bitpos));
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5431 results. */
5432 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5433 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5434 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5436 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5437 ll_unsignedp || rl_unsignedp);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5442 lr_unsignedp || rr_unsignedp);
5443 if (! all_ones_mask_p (lr_mask, rnbitsize))
5444 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5446 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize + ll_bitpos == rl_bitpos
5457 && lr_bitsize + lr_bitpos == rr_bitpos)
5458 || (ll_bitpos == rl_bitpos + rl_bitsize
5459 && lr_bitpos == rr_bitpos + rr_bitsize))
5461 tree type;
5463 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5464 ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5467 lr_bitsize + rr_bitsize,
5468 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5470 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5471 size_int (MIN (xll_bitpos, xrl_bitpos)));
5472 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5473 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5475 /* Convert to the smaller type before masking out unwanted bits. */
5476 type = lntype;
5477 if (lntype != rntype)
5479 if (lnbitsize > rnbitsize)
5481 lhs = fold_convert_loc (loc, rntype, lhs);
5482 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5483 type = rntype;
5485 else if (lnbitsize < rnbitsize)
5487 rhs = fold_convert_loc (loc, lntype, rhs);
5488 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5489 type = lntype;
5493 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5494 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5496 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5497 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5499 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5502 return 0;
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5510 if (! integer_zerop (result)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5512 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5514 if (wanted_code == NE_EXPR)
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type);
5519 else
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type);
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5529 merged constant. */
5530 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5531 ll_unsignedp || rl_unsignedp);
5533 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5534 if (! all_ones_mask_p (ll_mask, lnbitsize))
5535 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5537 return build2_loc (loc, wanted_code, truth_type, result,
5538 const_binop (BIT_IOR_EXPR, l_const, r_const));
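/* As an illustrative sketch, assuming a little-endian target with the
   usual bit-field layout: for

     struct s { unsigned int a : 4; unsigned int b : 4; } x;

   the test x.a == 3 && x.b == 2 merges into a single masked comparison
   of the containing word,

     (word & 0xff) == 0x23

   where 0x23 is the merged constant 3 | (2 << 4) and 0xff is the IOR
   of the two field masks.  */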
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5542 constant. */
5544 static tree
5545 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5546 tree op0, tree op1)
5548 tree arg0 = op0;
5549 enum tree_code op_code;
5550 tree comp_const;
5551 tree minmax_const;
5552 int consts_equal, consts_lt;
5553 tree inner;
5555 STRIP_SIGN_NOPS (arg0);
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5560 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5561 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5562 inner = TREE_OPERAND (arg0, 0);
5564 /* If something does not permit us to optimize, return NULL_TREE. */
5565 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5566 || TREE_CODE (comp_const) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const)
5568 || TREE_CODE (minmax_const) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const))
5570 return NULL_TREE;
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5574 simplifications. */
5575 switch (code)
5577 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5579 tree tem
5580 = optimize_minmax_comparison (loc,
5581 invert_tree_comparison (code, false),
5582 type, op0, op1);
5583 if (tem)
5584 return invert_truthvalue_loc (loc, tem);
5585 return NULL_TREE;
5588 case GE_EXPR:
5589 return
5590 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5591 optimize_minmax_comparison
5592 (loc, EQ_EXPR, type, arg0, comp_const),
5593 optimize_minmax_comparison
5594 (loc, GT_EXPR, type, arg0, comp_const));
5596 case EQ_EXPR:
5597 if (op_code == MAX_EXPR && consts_equal)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5601 else if (op_code == MAX_EXPR && consts_lt)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5605 else if (op_code == MAX_EXPR)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609 else if (consts_equal)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5613 else if (consts_lt)
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5617 else
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5621 case GT_EXPR:
5622 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5631 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5636 else
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5640 default:
5641 return NULL_TREE;
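/* As an illustrative sketch: the EQ_EXPR and GT_EXPR cases above
   combine with the inversion done for NE/LT/LE, so e.g.

     MAX (X, 0) != 0  ->  !(MAX (X, 0) == 0)  ->  !(X <= 0)  ->  X > 0

   follows without needing a dedicated NE_EXPR table entry.  */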
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5659 We set *STRICT_OVERFLOW_P to true if the return value depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5672 static int depth;
5673 tree ret;
5675 if (depth > 3)
5676 return NULL;
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5682 return ret;
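/* As an illustrative sketch of why overflow matters here: for 32-bit
   unsigned x = 0x40000000, (x * 4) / 2 is 0 because x * 4 wraps to 0,
   while x * 2 is 0x80000000.  The fold (x * 4) / 2 -> x * 2 is
   therefore only valid when overflow is undefined, which is what
   *STRICT_OVERFLOW_P records.  */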
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c));
5720 break;
5722 CASE_CONVERT: case NON_LVALUE_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5732 && (TYPE_PRECISION (ctype)
5733 > TYPE_PRECISION (TREE_TYPE (op0))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type)
5737 < TYPE_PRECISION (TREE_TYPE (op0)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code != MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5743 /* ... or has undefined overflow while the converted to
5744 type has not, we cannot do the operation in the inner type
5745 as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5747 && !TYPE_OVERFLOW_UNDEFINED (type))))
5748 break;
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5754 && TREE_CODE (t2) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2)
5756 && (0 != (t1 = extract_muldiv (op0, t2, code,
5757 code == MULT_EXPR
5758 ? ctype : NULL_TREE,
5759 strict_overflow_p))))
5760 return t1;
5761 break;
5763 case ABS_EXPR:
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5768 tree cstype = (*signed_type_for) (ctype);
5769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5770 != 0)
5772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5773 return fold_convert (ctype, t1);
5775 break;
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c) == -1)
5779 break;
5780 /* FALLTHROUGH */
5781 case NEGATE_EXPR:
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5786 break;
5787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5788 != 0)
5789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 break;
5792 case MIN_EXPR: case MAX_EXPR:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 break;
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p = false;
5800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0
5802 && (t2 = extract_muldiv (op1, c, code, wide_type,
5803 &sub_strict_overflow_p)) != 0)
5805 if (tree_int_cst_sgn (c) < 0)
5806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5807 if (sub_strict_overflow_p)
5808 *strict_overflow_p = true;
5809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5810 fold_convert (ctype, t2));
5812 break;
5814 case LSHIFT_EXPR: case RSHIFT_EXPR:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division, by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5820 multiplication. */
5821 if (TREE_CODE (op1) == INTEGER_CST
5822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5826 && TREE_INT_CST_HIGH (op1) == 0
5827 && 0 != (t1 = fold_convert (ctype,
5828 const_binop (LSHIFT_EXPR,
5829 size_one_node,
5830 op1)))
5831 && !TREE_OVERFLOW (t1))
5832 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5833 ? MULT_EXPR : FLOOR_DIV_EXPR,
5834 ctype,
5835 fold_convert (ctype, op0),
5836 t1),
5837 c, code, wide_type, strict_overflow_p);
5838 break;
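/* As an illustrative sketch: for unsigned x, the rewrite above turns
   (x << 3) * 2 into (x * 8) * 2, which the MULT_EXPR association
   further below then folds to x * 16.  */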
5840 case PLUS_EXPR: case MINUS_EXPR:
5841 /* See if we can eliminate the operation on both sides. If we can, we
5842 can return a new PLUS or MINUS. If we can't, the only remaining
5843 cases where we can do anything are if the second operand is a
5844 constant. */
5845 sub_strict_overflow_p = false;
5846 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5847 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5848 if (t1 != 0 && t2 != 0
5849 && (code == MULT_EXPR
5850 /* If not multiplication, we can only do this if both operands
5851 are divisible by c. */
5852 || (multiple_of_p (ctype, op0, c)
5853 && multiple_of_p (ctype, op1, c))))
5855 if (sub_strict_overflow_p)
5856 *strict_overflow_p = true;
5857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5858 fold_convert (ctype, t2));
5861 /* If this was a subtraction, negate OP1 and set it to be an addition.
5862 This simplifies the logic below. */
5863 if (tcode == MINUS_EXPR)
5865 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5866 /* If OP1 was not easily negatable, the constant may be OP0. */
5867 if (TREE_CODE (op0) == INTEGER_CST)
5869 tree tem = op0;
5870 op0 = op1;
5871 op1 = tem;
5872 tem = t1;
5873 t1 = t2;
5874 t2 = tem;
5878 if (TREE_CODE (op1) != INTEGER_CST)
5879 break;
5881 /* If either OP1 or C is negative, this optimization is not safe for
5882 some of the division and remainder types while for others we need
5883 to change the code. */
5884 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5886 if (code == CEIL_DIV_EXPR)
5887 code = FLOOR_DIV_EXPR;
5888 else if (code == FLOOR_DIV_EXPR)
5889 code = CEIL_DIV_EXPR;
5890 else if (code != MULT_EXPR
5891 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5892 break;
5895 /* If it's a multiply or a division/modulus operation of a multiple
5896 of our constant, do the operation and verify it doesn't overflow. */
5897 if (code == MULT_EXPR
5898 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5900 op1 = const_binop (code, fold_convert (ctype, op1),
5901 fold_convert (ctype, c));
5902 /* We allow the constant to overflow with wrapping semantics. */
5903 if (op1 == 0
5904 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5905 break;
5907 else
5908 break;
5910 /* If we have an unsigned type, we cannot widen the operation since it
5911 will change the result if the original computation overflowed. */
5912 if (TYPE_UNSIGNED (ctype) && ctype != type)
5913 break;
5915 /* If we were able to eliminate our operation from the first side,
5916 apply our operation to the second side and reform the PLUS. */
5917 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5918 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5920 /* The last case is if we are a multiply. In that case, we can
5921 apply the distributive law to commute the multiply and addition
5922 if the multiplication of the constants doesn't overflow
5923 and overflow is defined. With undefined overflow
5924 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5925 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5926 return fold_build2 (tcode, ctype,
5927 fold_build2 (code, ctype,
5928 fold_convert (ctype, op0),
5929 fold_convert (ctype, c)),
5930 op1);
5932 break;
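/* As an illustrative sketch of the distributive case above: for
   unsigned x, (x + 4) * 2 becomes x * 2 + 8, which is safe because
   unsigned arithmetic wraps.  */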
5934 case MULT_EXPR:
5935 /* We have a special case here if we are doing something like
5936 (C * 8) % 4 since we know that's zero. */
5937 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5938 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5939 /* If the multiplication can overflow we cannot optimize this. */
5940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5941 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5942 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5944 *strict_overflow_p = true;
5945 return omit_one_operand (type, integer_zero_node, op0);
5948 /* ... fall through ... */
5950 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5951 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5952 /* If we can extract our operation from the LHS, do so and return a
5953 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5954 do something only if the second operand is a constant. */
5955 if (same_p
5956 && (t1 = extract_muldiv (op0, c, code, wide_type,
5957 strict_overflow_p)) != 0)
5958 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5959 fold_convert (ctype, op1));
5960 else if (tcode == MULT_EXPR && code == MULT_EXPR
5961 && (t1 = extract_muldiv (op1, c, code, wide_type,
5962 strict_overflow_p)) != 0)
5963 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5964 fold_convert (ctype, t1));
5965 else if (TREE_CODE (op1) != INTEGER_CST)
5966 return 0;
5968 /* If these are the same operation types, we can associate them
5969 assuming no overflow. */
5970 if (tcode == code)
5972 double_int mul;
5973 bool overflow_p;
5974 unsigned prec = TYPE_PRECISION (ctype);
5975 bool uns = TYPE_UNSIGNED (ctype);
5976 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5977 double_int dic = tree_to_double_int (c).ext (prec, uns);
5978 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5979 overflow_p = ((!uns && overflow_p)
5980 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5981 if (!double_int_fits_to_tree_p (ctype, mul)
5982 && ((uns && tcode != MULT_EXPR) || !uns))
5983 overflow_p = 1;
5984 if (!overflow_p)
5985 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5986 double_int_to_tree (ctype, mul));
5989 /* If these operations "cancel" each other, we have the main
5990 optimizations of this pass, which occur when either constant is a
5991 multiple of the other, in which case we replace this with an
5992 operation of either CODE or TCODE.
5994 If we have an unsigned type, we cannot do this since it will change
5995 the result if the original computation overflowed. */
5996 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5997 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5998 || (tcode == MULT_EXPR
5999 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6000 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6001 && code != MULT_EXPR)))
6003 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6005 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6006 *strict_overflow_p = true;
6007 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6008 fold_convert (ctype,
6009 const_binop (TRUNC_DIV_EXPR,
6010 op1, c)));
6012 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6014 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6015 *strict_overflow_p = true;
6016 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6017 fold_convert (ctype,
6018 const_binop (TRUNC_DIV_EXPR,
6019 c, op1)));
6022 break;
6024 default:
6025 break;
6028 return 0;
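/* As an illustrative sketch of the cancellation above: for signed x
   with undefined overflow, (x * 12) / 4 folds to x * 3 and
   (x * 4) / 12 folds to x / 3, in both cases setting
   *STRICT_OVERFLOW_P.  */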
6031 /* Return a node which has the indicated constant VALUE (either 0 or
6032 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6033 and is of the indicated TYPE. */
6035 tree
6036 constant_boolean_node (bool value, tree type)
6038 if (type == integer_type_node)
6039 return value ? integer_one_node : integer_zero_node;
6040 else if (type == boolean_type_node)
6041 return value ? boolean_true_node : boolean_false_node;
6042 else if (TREE_CODE (type) == VECTOR_TYPE)
6043 return build_vector_from_val (type,
6044 build_int_cst (TREE_TYPE (type),
6045 value ? -1 : 0));
6046 else
6047 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6051 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6052 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6053 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6054 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6055 COND is the first argument to CODE; otherwise (as in the example
6056 given here), it is the second argument. TYPE is the type of the
6057 original expression. Return NULL_TREE if no simplification is
6058 possible. */
6060 static tree
6061 fold_binary_op_with_conditional_arg (location_t loc,
6062 enum tree_code code,
6063 tree type, tree op0, tree op1,
6064 tree cond, tree arg, int cond_first_p)
6066 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6067 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6068 tree test, true_value, false_value;
6069 tree lhs = NULL_TREE;
6070 tree rhs = NULL_TREE;
6071 enum tree_code cond_code = COND_EXPR;
6073 if (TREE_CODE (cond) == COND_EXPR
6074 || TREE_CODE (cond) == VEC_COND_EXPR)
6076 test = TREE_OPERAND (cond, 0);
6077 true_value = TREE_OPERAND (cond, 1);
6078 false_value = TREE_OPERAND (cond, 2);
6079 /* If this operand throws an exception, then it does not make
6080 sense to try to perform a logical or arithmetic operation
6081 involving it. */
6082 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6083 lhs = true_value;
6084 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6085 rhs = false_value;
6087 else
6089 tree testtype = TREE_TYPE (cond);
6090 test = cond;
6091 true_value = constant_boolean_node (true, testtype);
6092 false_value = constant_boolean_node (false, testtype);
6095 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6096 cond_code = VEC_COND_EXPR;
6098 /* This transformation is only worthwhile if we don't have to wrap ARG
6099 in a SAVE_EXPR and the operation can be simplified without recursing
6100 on at least one of the branches once it is pushed inside the COND_EXPR. */
6101 if (!TREE_CONSTANT (arg)
6102 && (TREE_SIDE_EFFECTS (arg)
6103 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6104 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6105 return NULL_TREE;
6107 arg = fold_convert_loc (loc, arg_type, arg);
6108 if (lhs == 0)
6110 true_value = fold_convert_loc (loc, cond_type, true_value);
6111 if (cond_first_p)
6112 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6113 else
6114 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6116 if (rhs == 0)
6118 false_value = fold_convert_loc (loc, cond_type, false_value);
6119 if (cond_first_p)
6120 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6121 else
6122 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6125 /* Check that we have simplified at least one of the branches. */
6126 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6127 return NULL_TREE;
6129 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6133 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6135 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6136 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6137 ADDEND is the same as X.
6139 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6140 and finite. The problematic cases are when X is zero, and its mode
6141 has signed zeros. In the case of rounding towards -infinity,
6142 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6143 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6145 bool
6146 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6148 if (!real_zerop (addend))
6149 return false;
6151 /* Don't allow the fold with -fsignaling-nans. */
6152 if (HONOR_SNANS (TYPE_MODE (type)))
6153 return false;
6155 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6156 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6157 return true;
6159 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6160 if (TREE_CODE (addend) == REAL_CST
6161 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6162 negate = !negate;
6164 /* The mode has signed zeros, and we have to honor their sign.
6165 In this situation, there is only one case we can return true for.
6166 X - 0 is the same as X unless rounding towards -infinity is
6167 supported. */
6168 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
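/* As an illustrative sketch: under these rules x - 0.0 folds to x in
   the default rounding mode, but x + 0.0 does not fold when signed
   zeros are honored, since (-0.0) + 0.0 is +0.0.  */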
6171 /* Subroutine of fold() that checks comparisons of built-in math
6172 functions against real constants.
6174 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6175 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6176 is the type of the result and ARG0 and ARG1 are the operands of the
6177 comparison. ARG1 must be a TREE_REAL_CST.
6179 The function returns the constant folded tree if a simplification
6180 can be made, and NULL_TREE otherwise. */
6182 static tree
6183 fold_mathfn_compare (location_t loc,
6184 enum built_in_function fcode, enum tree_code code,
6185 tree type, tree arg0, tree arg1)
6187 REAL_VALUE_TYPE c;
6189 if (BUILTIN_SQRT_P (fcode))
6191 tree arg = CALL_EXPR_ARG (arg0, 0);
6192 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6194 c = TREE_REAL_CST (arg1);
6195 if (REAL_VALUE_NEGATIVE (c))
6197 /* sqrt(x) < y is always false, if y is negative. */
6198 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6199 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6201 /* sqrt(x) > y is always true, if y is negative and we
6202 don't care about NaNs, i.e. negative values of x. */
6203 if (code == NE_EXPR || !HONOR_NANS (mode))
6204 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6206 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6207 return fold_build2_loc (loc, GE_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg), dconst0));
6210 else if (code == GT_EXPR || code == GE_EXPR)
6212 REAL_VALUE_TYPE c2;
6214 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6215 real_convert (&c2, mode, &c2);
6217 if (REAL_VALUE_ISINF (c2))
6219 /* sqrt(x) > y is x == +Inf, when y is very large. */
6220 if (HONOR_INFINITIES (mode))
6221 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6222 build_real (TREE_TYPE (arg), c2));
6224 /* sqrt(x) > y is always false, when y is very large
6225 and we don't care about infinities. */
6226 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6229 /* sqrt(x) > c is the same as x > c*c. */
6230 return fold_build2_loc (loc, code, type, arg,
6231 build_real (TREE_TYPE (arg), c2));
6233 else if (code == LT_EXPR || code == LE_EXPR)
6235 REAL_VALUE_TYPE c2;
6237 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6238 real_convert (&c2, mode, &c2);
6240 if (REAL_VALUE_ISINF (c2))
6242 /* sqrt(x) < y is always true, when y is a very large
6243 value and we don't care about NaNs or Infinities. */
6244 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6245 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6247 /* sqrt(x) < y is x != +Inf when y is very large and we
6248 don't care about NaNs. */
6249 if (! HONOR_NANS (mode))
6250 return fold_build2_loc (loc, NE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg), c2));
6253 /* sqrt(x) < y is x >= 0 when y is very large and we
6254 don't care about Infinities. */
6255 if (! HONOR_INFINITIES (mode))
6256 return fold_build2_loc (loc, GE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg), dconst0));
6259 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6260 arg = save_expr (arg);
6261 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6262 fold_build2_loc (loc, GE_EXPR, type, arg,
6263 build_real (TREE_TYPE (arg),
6264 dconst0)),
6265 fold_build2_loc (loc, NE_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg),
6267 c2)));
6270 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6271 if (! HONOR_NANS (mode))
6272 return fold_build2_loc (loc, code, type, arg,
6273 build_real (TREE_TYPE (arg), c2));
6275 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6276 arg = save_expr (arg);
6277 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6278 fold_build2_loc (loc, GE_EXPR, type, arg,
6279 build_real (TREE_TYPE (arg),
6280 dconst0)),
6281 fold_build2_loc (loc, code, type, arg,
6282 build_real (TREE_TYPE (arg),
6283 c2)));
6287 return NULL_TREE;
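/* As an illustrative sketch: with a finite positive bound,
   sqrt(x) > 2.0 folds to x > 4.0, while sqrt(x) < 2.0 needs the
   extra guard and becomes x >= 0 && x < 4.0 when NaNs are
   honored.  */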
6290 /* Subroutine of fold() that optimizes comparisons against Infinities,
6291 either +Inf or -Inf.
6293 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6294 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6295 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6297 The function returns the constant folded tree if a simplification
6298 can be made, and NULL_TREE otherwise. */
6300 static tree
6301 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6302 tree arg0, tree arg1)
6304 enum machine_mode mode;
6305 REAL_VALUE_TYPE max;
6306 tree temp;
6307 bool neg;
6309 mode = TYPE_MODE (TREE_TYPE (arg0));
6311 /* For negative infinity swap the sense of the comparison. */
6312 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6313 if (neg)
6314 code = swap_tree_comparison (code);
6316 switch (code)
6318 case GT_EXPR:
6319 /* x > +Inf is always false, if we ignore sNaNs. */
6320 if (HONOR_SNANS (mode))
6321 return NULL_TREE;
6322 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6324 case LE_EXPR:
6325 /* x <= +Inf is always true, if we don't care about NaNs. */
6326 if (! HONOR_NANS (mode))
6327 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6329 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6330 arg0 = save_expr (arg0);
6331 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6333 case EQ_EXPR:
6334 case GE_EXPR:
6335 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6336 real_maxval (&max, neg, mode);
6337 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6340 case LT_EXPR:
6341 /* x < +Inf is always equal to x <= DBL_MAX. */
6342 real_maxval (&max, neg, mode);
6343 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6344 arg0, build_real (TREE_TYPE (arg0), max));
6346 case NE_EXPR:
6347 /* x != +Inf is always equal to !(x > DBL_MAX). */
6348 real_maxval (&max, neg, mode);
6349 if (! HONOR_NANS (mode))
6350 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6351 arg0, build_real (TREE_TYPE (arg0), max));
6353 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6354 arg0, build_real (TREE_TYPE (arg0), max));
6355 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6357 default:
6358 break;
6361 return NULL_TREE;
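/* As an illustrative sketch, for double x:
     x < __builtin_inf ()   ->  x <= DBL_MAX
     x >= __builtin_inf ()  ->  x > DBL_MAX.  */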
6364 /* Subroutine of fold() that optimizes comparisons of a division by
6365 a nonzero integer constant against an integer constant, i.e.
6366 X/C1 op C2.
6368 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6369 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6370 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6372 The function returns the constant folded tree if a simplification
6373 can be made, and NULL_TREE otherwise. */
6375 static tree
6376 fold_div_compare (location_t loc,
6377 enum tree_code code, tree type, tree arg0, tree arg1)
6379 tree prod, tmp, hi, lo;
6380 tree arg00 = TREE_OPERAND (arg0, 0);
6381 tree arg01 = TREE_OPERAND (arg0, 1);
6382 double_int val;
6383 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6384 bool neg_overflow;
6385 bool overflow;
6387 /* We have to do this the hard way to detect unsigned overflow.
6388 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6389 val = TREE_INT_CST (arg01)
6390 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6391 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6392 neg_overflow = false;
6394 if (unsigned_p)
6396 tmp = int_const_binop (MINUS_EXPR, arg01,
6397 build_int_cst (TREE_TYPE (arg01), 1));
6398 lo = prod;
6400 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6401 val = TREE_INT_CST (prod)
6402 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6403 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6404 -1, overflow | TREE_OVERFLOW (prod));
6406 else if (tree_int_cst_sgn (arg01) >= 0)
6408 tmp = int_const_binop (MINUS_EXPR, arg01,
6409 build_int_cst (TREE_TYPE (arg01), 1));
6410 switch (tree_int_cst_sgn (arg1))
6412 case -1:
6413 neg_overflow = true;
6414 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6415 hi = prod;
6416 break;
6418 case 0:
6419 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6420 hi = tmp;
6421 break;
6423 case 1:
6424 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6425 lo = prod;
6426 break;
6428 default:
6429 gcc_unreachable ();
6432 else
6434 /* A negative divisor reverses the relational operators. */
6435 code = swap_tree_comparison (code);
6437 tmp = int_const_binop (PLUS_EXPR, arg01,
6438 build_int_cst (TREE_TYPE (arg01), 1));
6439 switch (tree_int_cst_sgn (arg1))
6441 case -1:
6442 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6443 lo = prod;
6444 break;
6446 case 0:
6447 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6448 lo = tmp;
6449 break;
6451 case 1:
6452 neg_overflow = true;
6453 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6454 hi = prod;
6455 break;
6457 default:
6458 gcc_unreachable ();
6462 switch (code)
6464 case EQ_EXPR:
6465 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6466 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6467 if (TREE_OVERFLOW (hi))
6468 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6469 if (TREE_OVERFLOW (lo))
6470 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6471 return build_range_check (loc, type, arg00, 1, lo, hi);
6473 case NE_EXPR:
6474 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6475 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6476 if (TREE_OVERFLOW (hi))
6477 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6478 if (TREE_OVERFLOW (lo))
6479 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6480 return build_range_check (loc, type, arg00, 0, lo, hi);
6482 case LT_EXPR:
6483 if (TREE_OVERFLOW (lo))
6485 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6488 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6490 case LE_EXPR:
6491 if (TREE_OVERFLOW (hi))
6493 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6494 return omit_one_operand_loc (loc, type, tmp, arg00);
6496 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6498 case GT_EXPR:
6499 if (TREE_OVERFLOW (hi))
6501 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6502 return omit_one_operand_loc (loc, type, tmp, arg00);
6504 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6506 case GE_EXPR:
6507 if (TREE_OVERFLOW (lo))
6509 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6510 return omit_one_operand_loc (loc, type, tmp, arg00);
6512 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6514 default:
6515 break;
6518 return NULL_TREE;
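/* As an illustrative sketch: for unsigned x, the bounds computed
   above turn x / 4 == 3 into the range check 12 <= x && x <= 15,
   typically emitted as x - 12 <= 3.  */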
6522 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6523 equality/inequality test, then return a simplified form of the test
6524 using a sign test. Otherwise return NULL. TYPE is the desired
6525 result type. */
6527 static tree
6528 fold_single_bit_test_into_sign_test (location_t loc,
6529 enum tree_code code, tree arg0, tree arg1,
6530 tree result_type)
6532 /* If this is testing a single bit, we can optimize the test. */
6533 if ((code == NE_EXPR || code == EQ_EXPR)
6534 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6535 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6537 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6538 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6539 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6541 if (arg00 != NULL_TREE
6542 /* This is only a win if casting to a signed type is cheap,
6543 i.e. when arg00's type is not a partial mode. */
6544 && TYPE_PRECISION (TREE_TYPE (arg00))
6545 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6547 tree stype = signed_type_for (TREE_TYPE (arg00));
6548 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6549 result_type,
6550 fold_convert_loc (loc, stype, arg00),
6551 build_int_cst (stype, 0));
6555 return NULL_TREE;
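/* As an illustrative sketch: for 32-bit unsigned x,
     (x & 0x80000000) != 0  ->  (int) x < 0
     (x & 0x80000000) == 0  ->  (int) x >= 0.  */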
6558 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6559 equality/inequality test, then return a simplified form of
6560 the test using shifts and logical operations. Otherwise return
6561 NULL. TYPE is the desired result type. */
6563 tree
6564 fold_single_bit_test (location_t loc, enum tree_code code,
6565 tree arg0, tree arg1, tree result_type)
6567 /* If this is testing a single bit, we can optimize the test. */
6568 if ((code == NE_EXPR || code == EQ_EXPR)
6569 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6570 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6572 tree inner = TREE_OPERAND (arg0, 0);
6573 tree type = TREE_TYPE (arg0);
6574 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6575 enum machine_mode operand_mode = TYPE_MODE (type);
6576 int ops_unsigned;
6577 tree signed_type, unsigned_type, intermediate_type;
6578 tree tem, one;
6580 /* First, see if we can fold the single bit test into a sign-bit
6581 test. */
6582 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6583 result_type);
6584 if (tem)
6585 return tem;
6587 /* Otherwise we have (A & C) != 0 where C is a single bit,
6588 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6589 Similarly for (A & C) == 0. */
6591 /* If INNER is a right shift of a constant and it plus BITNUM does
6592 not overflow, adjust BITNUM and INNER. */
6593 if (TREE_CODE (inner) == RSHIFT_EXPR
6594 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6595 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6596 && bitnum < TYPE_PRECISION (type)
6597 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6598 bitnum - TYPE_PRECISION (type)))
6600 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6601 inner = TREE_OPERAND (inner, 0);
6604 /* If we are going to be able to omit the AND below, we must do our
6605 operations as unsigned. If we must use the AND, we have a choice.
6606 Normally unsigned is faster, but for some machines signed is. */
6607 #ifdef LOAD_EXTEND_OP
6608 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6609 && !flag_syntax_only) ? 0 : 1;
6610 #else
6611 ops_unsigned = 1;
6612 #endif
6614 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6615 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6616 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6617 inner = fold_convert_loc (loc, intermediate_type, inner);
6619 if (bitnum != 0)
6620 inner = build2 (RSHIFT_EXPR, intermediate_type,
6621 inner, size_int (bitnum));
6623 one = build_int_cst (intermediate_type, 1);
6625 if (code == EQ_EXPR)
6626 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6628 /* Put the AND last so it can combine with more things. */
6629 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6631 /* Make sure to return the proper type. */
6632 inner = fold_convert_loc (loc, result_type, inner);
6634 return inner;
6636 return NULL_TREE;
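/* As an illustrative sketch:
     (x & 8) != 0  ->  (x >> 3) & 1
     (x & 8) == 0  ->  ((x >> 3) ^ 1) & 1
   with the AND kept last so it can combine with surrounding code.  */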
6639 /* Check whether we are allowed to reorder operands arg0 and arg1,
6640 such that the evaluation of arg1 occurs before arg0. */
6642 static bool
6643 reorder_operands_p (const_tree arg0, const_tree arg1)
6645 if (! flag_evaluation_order)
6646 return true;
6647 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6648 return true;
6649 return ! TREE_SIDE_EFFECTS (arg0)
6650 && ! TREE_SIDE_EFFECTS (arg1);
6653 /* Test whether it is preferable to swap two operands, ARG0 and
6654 ARG1, for example because ARG0 is an integer constant and ARG1
6655 isn't. If REORDER is true, only recommend swapping if we can
6656 evaluate the operands in reverse order. */
6658 bool
6659 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6661 STRIP_SIGN_NOPS (arg0);
6662 STRIP_SIGN_NOPS (arg1);
6664 if (TREE_CODE (arg1) == INTEGER_CST)
6665 return 0;
6666 if (TREE_CODE (arg0) == INTEGER_CST)
6667 return 1;
6669 if (TREE_CODE (arg1) == REAL_CST)
6670 return 0;
6671 if (TREE_CODE (arg0) == REAL_CST)
6672 return 1;
6674 if (TREE_CODE (arg1) == FIXED_CST)
6675 return 0;
6676 if (TREE_CODE (arg0) == FIXED_CST)
6677 return 1;
6679 if (TREE_CODE (arg1) == COMPLEX_CST)
6680 return 0;
6681 if (TREE_CODE (arg0) == COMPLEX_CST)
6682 return 1;
6684 if (TREE_CONSTANT (arg1))
6685 return 0;
6686 if (TREE_CONSTANT (arg0))
6687 return 1;
6689 if (optimize_function_for_size_p (cfun))
6690 return 0;
6692 if (reorder && flag_evaluation_order
6693 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6694 return 0;
6696 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6697 for commutative and comparison operators. Ensuring a canonical
6698 form allows the optimizers to find additional redundancies without
6699 having to explicitly check for both orderings. */
6700 if (TREE_CODE (arg0) == SSA_NAME
6701 && TREE_CODE (arg1) == SSA_NAME
6702 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6703 return 1;
6705 /* Put SSA_NAMEs last. */
6706 if (TREE_CODE (arg1) == SSA_NAME)
6707 return 0;
6708 if (TREE_CODE (arg0) == SSA_NAME)
6709 return 1;
6711 /* Put variables last. */
6712 if (DECL_P (arg1))
6713 return 0;
6714 if (DECL_P (arg0))
6715 return 1;
6717 return 0;
6720 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6721 ARG0 is extended to a wider type. */
6723 static tree
6724 fold_widened_comparison (location_t loc, enum tree_code code,
6725 tree type, tree arg0, tree arg1)
6727 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6728 tree arg1_unw;
6729 tree shorter_type, outer_type;
6730 tree min, max;
6731 bool above, below;
6733 if (arg0_unw == arg0)
6734 return NULL_TREE;
6735 shorter_type = TREE_TYPE (arg0_unw);
6737 #ifdef HAVE_canonicalize_funcptr_for_compare
6738 /* Disable this optimization if we're casting a function pointer
6739 type on targets that require function pointer canonicalization. */
6740 if (HAVE_canonicalize_funcptr_for_compare
6741 && TREE_CODE (shorter_type) == POINTER_TYPE
6742 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6743 return NULL_TREE;
6744 #endif
6746 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6747 return NULL_TREE;
6749 arg1_unw = get_unwidened (arg1, NULL_TREE);
6751 /* If possible, express the comparison in the shorter mode. */
6752 if ((code == EQ_EXPR || code == NE_EXPR
6753 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6754 && (TREE_TYPE (arg1_unw) == shorter_type
6755 || ((TYPE_PRECISION (shorter_type)
6756 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6757 && (TYPE_UNSIGNED (shorter_type)
6758 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6759 || (TREE_CODE (arg1_unw) == INTEGER_CST
6760 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6761 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6762 && int_fits_type_p (arg1_unw, shorter_type))))
6763 return fold_build2_loc (loc, code, type, arg0_unw,
6764 fold_convert_loc (loc, shorter_type, arg1_unw));
6766 if (TREE_CODE (arg1_unw) != INTEGER_CST
6767 || TREE_CODE (shorter_type) != INTEGER_TYPE
6768 || !int_fits_type_p (arg1_unw, shorter_type))
6769 return NULL_TREE;
6771 /* If we are comparing with an integer that does not fit into the range
6772 of the shorter type, the result is known. */
6773 outer_type = TREE_TYPE (arg1_unw);
6774 min = lower_bound_in_type (outer_type, shorter_type);
6775 max = upper_bound_in_type (outer_type, shorter_type);
6777 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6778 max, arg1_unw));
6779 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6780 arg1_unw, min));
6782 switch (code)
6784 case EQ_EXPR:
6785 if (above || below)
6786 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6787 break;
6789 case NE_EXPR:
6790 if (above || below)
6791 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6792 break;
6794 case LT_EXPR:
6795 case LE_EXPR:
6796 if (above)
6797 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6798 else if (below)
6799 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6801 case GT_EXPR:
6802 case GE_EXPR:
6803 if (above)
6804 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6805 else if (below)
6806 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6808 default:
6809 break;
6812 return NULL_TREE;
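/* As an illustrative sketch: for unsigned char c, (int) c == 300
   folds to 0 and (int) c < 300 folds to 1, because 300 lies above
   the range [0, 255] of the shorter type.  */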
6815 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0
6816 is a conversion that changes only the signedness of its operand. */
6818 static tree
6819 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6820 tree arg0, tree arg1)
6822 tree arg0_inner;
6823 tree inner_type, outer_type;
6825 if (!CONVERT_EXPR_P (arg0))
6826 return NULL_TREE;
6828 outer_type = TREE_TYPE (arg0);
6829 arg0_inner = TREE_OPERAND (arg0, 0);
6830 inner_type = TREE_TYPE (arg0_inner);
6832 #ifdef HAVE_canonicalize_funcptr_for_compare
6833 /* Disable this optimization if we're casting a function pointer
6834 type on targets that require function pointer canonicalization. */
6835 if (HAVE_canonicalize_funcptr_for_compare
6836 && TREE_CODE (inner_type) == POINTER_TYPE
6837 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6838 return NULL_TREE;
6839 #endif
6841 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6842 return NULL_TREE;
6844 if (TREE_CODE (arg1) != INTEGER_CST
6845 && !(CONVERT_EXPR_P (arg1)
6846 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6847 return NULL_TREE;
6849 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6850 && code != NE_EXPR
6851 && code != EQ_EXPR)
6852 return NULL_TREE;
6854 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6855 return NULL_TREE;
6857 if (TREE_CODE (arg1) == INTEGER_CST)
6858 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6859 0, TREE_OVERFLOW (arg1));
6860 else
6861 arg1 = fold_convert_loc (loc, inner_type, arg1);
6863 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
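/* As an illustrative sketch: for int i, (unsigned int) i == 5U folds
   to i == 5, since equality does not depend on the signedness of
   equally-wide operands.  */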
6866 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6867 the step of the array. Reconstructs s and delta in the case of s *
6868 delta being an integer constant (and thus already folded). ADDR is
6869 the address. OP1 is the multiplicative expression. If the
6870 function succeeds, the new address expression is returned.
6871 Otherwise NULL_TREE is returned. LOC is the location of the
6872 resulting expression. */
6874 static tree
6875 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6877 tree s, delta, step;
6878 tree ref = TREE_OPERAND (addr, 0), pref;
6879 tree ret, pos;
6880 tree itype;
6881 bool mdim = false;
6883 /* Strip the nops that might be added when converting op1 to sizetype. */
6884 STRIP_NOPS (op1);
6886 /* Canonicalize op1 into a possibly non-constant delta
6887 and an INTEGER_CST s. */
6888 if (TREE_CODE (op1) == MULT_EXPR)
6890 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6892 STRIP_NOPS (arg0);
6893 STRIP_NOPS (arg1);
6895 if (TREE_CODE (arg0) == INTEGER_CST)
6897 s = arg0;
6898 delta = arg1;
6900 else if (TREE_CODE (arg1) == INTEGER_CST)
6902 s = arg1;
6903 delta = arg0;
6905 else
6906 return NULL_TREE;
6908 else if (TREE_CODE (op1) == INTEGER_CST)
6910 delta = op1;
6911 s = NULL_TREE;
6913 else
6915 /* Act as if op1 were delta * 1. */
6916 delta = op1;
6917 s = integer_one_node;
6920 /* Handle &x.array the same as we would handle &x.array[0]. */
6921 if (TREE_CODE (ref) == COMPONENT_REF
6922 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6924 tree domain;
6926 /* Remember if this was a multi-dimensional array. */
6927 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6928 mdim = true;
6930 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6931 if (! domain)
6932 goto cont;
6933 itype = TREE_TYPE (domain);
6935 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6936 if (TREE_CODE (step) != INTEGER_CST)
6937 goto cont;
6939 if (s)
6941 if (! tree_int_cst_equal (step, s))
6942 goto cont;
6944 else
6946 /* Check whether delta is a multiple of the step. */
6947 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6948 if (! tmp)
6949 goto cont;
6950 delta = tmp;
6953 /* Only fold here if we can verify we do not overflow one
6954 dimension of a multi-dimensional array. */
6955 if (mdim)
6957 tree tmp;
6959 if (!TYPE_MIN_VALUE (domain)
6960 || !TYPE_MAX_VALUE (domain)
6961 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6962 goto cont;
6964 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6965 fold_convert_loc (loc, itype,
6966 TYPE_MIN_VALUE (domain)),
6967 fold_convert_loc (loc, itype, delta));
6968 if (TREE_CODE (tmp) != INTEGER_CST
6969 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6970 goto cont;
6973 /* We found a suitable component reference. */
6975 pref = TREE_OPERAND (addr, 0);
6976 ret = copy_node (pref);
6977 SET_EXPR_LOCATION (ret, loc);
6979 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6980 fold_build2_loc
6981 (loc, PLUS_EXPR, itype,
6982 fold_convert_loc (loc, itype,
6983 TYPE_MIN_VALUE
6984 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6985 fold_convert_loc (loc, itype, delta)),
6986 NULL_TREE, NULL_TREE);
6987 return build_fold_addr_expr_loc (loc, ret);
6990 cont:
6992 for (;; ref = TREE_OPERAND (ref, 0))
6994 if (TREE_CODE (ref) == ARRAY_REF)
6996 tree domain;
6998 /* Remember if this was a multi-dimensional array. */
6999 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7000 mdim = true;
7002 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7003 if (! domain)
7004 continue;
7005 itype = TREE_TYPE (domain);
7007 step = array_ref_element_size (ref);
7008 if (TREE_CODE (step) != INTEGER_CST)
7009 continue;
7011 if (s)
7013 if (! tree_int_cst_equal (step, s))
7014 continue;
7016 else
7018 /* Check whether delta is a multiple of the step. */
7019 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7020 if (! tmp)
7021 continue;
7022 delta = tmp;
7025 /* Only fold here if we can verify we do not overflow one
7026 dimension of a multi-dimensional array. */
7027 if (mdim)
7029 tree tmp;
7031 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7032 || !TYPE_MAX_VALUE (domain)
7033 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7034 continue;
7036 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7037 fold_convert_loc (loc, itype,
7038 TREE_OPERAND (ref, 1)),
7039 fold_convert_loc (loc, itype, delta));
7040 if (!tmp
7041 || TREE_CODE (tmp) != INTEGER_CST
7042 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7043 continue;
7046 break;
7048 else
7049 mdim = false;
7051 if (!handled_component_p (ref))
7052 return NULL_TREE;
7055 /* We found a suitable array reference. Copy everything up to it,
7056 and replace the index. */
7058 pref = TREE_OPERAND (addr, 0);
7059 ret = copy_node (pref);
7060 SET_EXPR_LOCATION (ret, loc);
7061 pos = ret;
7063 while (pref != ref)
7065 pref = TREE_OPERAND (pref, 0);
7066 TREE_OPERAND (pos, 0) = copy_node (pref);
7067 pos = TREE_OPERAND (pos, 0);
7070 TREE_OPERAND (pos, 1)
7071 = fold_build2_loc (loc, PLUS_EXPR, itype,
7072 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7073 fold_convert_loc (loc, itype, delta));
7074 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
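/* As an illustrative sketch, assuming 4-byte int: for int a[10], the
   address &a[i] p+ j * 4 matches the element size, so it is rewritten
   to &a[i + j].  */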
7078 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7079 means A >= Y && A != MAX, but in this case we know that
7080 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7082 static tree
7083 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7085 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7087 if (TREE_CODE (bound) == LT_EXPR)
7088 a = TREE_OPERAND (bound, 0);
7089 else if (TREE_CODE (bound) == GT_EXPR)
7090 a = TREE_OPERAND (bound, 1);
7091 else
7092 return NULL_TREE;
7094 typea = TREE_TYPE (a);
7095 if (!INTEGRAL_TYPE_P (typea)
7096 && !POINTER_TYPE_P (typea))
7097 return NULL_TREE;
7099 if (TREE_CODE (ineq) == LT_EXPR)
7101 a1 = TREE_OPERAND (ineq, 1);
7102 y = TREE_OPERAND (ineq, 0);
7104 else if (TREE_CODE (ineq) == GT_EXPR)
7106 a1 = TREE_OPERAND (ineq, 0);
7107 y = TREE_OPERAND (ineq, 1);
7109 else
7110 return NULL_TREE;
7112 if (TREE_TYPE (a1) != typea)
7113 return NULL_TREE;
7115 if (POINTER_TYPE_P (typea))
7117 /* Convert the pointers to integers before taking the difference. */
7118 tree ta = fold_convert_loc (loc, ssizetype, a);
7119 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7120 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7122 else
7123 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7125 if (!diff || !integer_onep (diff))
7126 return NULL_TREE;
7128 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7131 /* Fold a sum or difference where at least one operand is a multiplication.
7132 Returns the folded tree or NULL if no simplification could be made. */
7134 static tree
7135 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7136 tree arg0, tree arg1)
7138 tree arg00, arg01, arg10, arg11;
7139 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7141 /* (A * C) +- (B * C) -> (A+-B) * C.
7142 (A * C) +- A -> A * (C+-1).
7143 We are most concerned about the case where C is a constant,
7144 but other combinations show up during loop reduction. Since
7145 it is not difficult, try all four possibilities. */
7147 if (TREE_CODE (arg0) == MULT_EXPR)
7149 arg00 = TREE_OPERAND (arg0, 0);
7150 arg01 = TREE_OPERAND (arg0, 1);
7152 else if (TREE_CODE (arg0) == INTEGER_CST)
7154 arg00 = build_one_cst (type);
7155 arg01 = arg0;
7157 else
7159 /* We cannot generate constant 1 for fract. */
7160 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7161 return NULL_TREE;
7162 arg00 = arg0;
7163 arg01 = build_one_cst (type);
7165 if (TREE_CODE (arg1) == MULT_EXPR)
7167 arg10 = TREE_OPERAND (arg1, 0);
7168 arg11 = TREE_OPERAND (arg1, 1);
7170 else if (TREE_CODE (arg1) == INTEGER_CST)
7172 arg10 = build_one_cst (type);
7173 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7174 the purpose of this canonicalization. */
7175 if (TREE_INT_CST_HIGH (arg1) == -1
7176 && negate_expr_p (arg1)
7177 && code == PLUS_EXPR)
7179 arg11 = negate_expr (arg1);
7180 code = MINUS_EXPR;
7182 else
7183 arg11 = arg1;
7185 else
7187 /* We cannot generate constant 1 for fract. */
7188 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7189 return NULL_TREE;
7190 arg10 = arg1;
7191 arg11 = build_one_cst (type);
7193 same = NULL_TREE;
7195 if (operand_equal_p (arg01, arg11, 0))
7196 same = arg01, alt0 = arg00, alt1 = arg10;
7197 else if (operand_equal_p (arg00, arg10, 0))
7198 same = arg00, alt0 = arg01, alt1 = arg11;
7199 else if (operand_equal_p (arg00, arg11, 0))
7200 same = arg00, alt0 = arg01, alt1 = arg10;
7201 else if (operand_equal_p (arg01, arg10, 0))
7202 same = arg01, alt0 = arg00, alt1 = arg11;
7204 /* No identical multiplicands; see if we can find a common
7205 power-of-two factor in non-power-of-two multiplies. This
7206 can help in multi-dimensional array access. */
7207 else if (host_integerp (arg01, 0)
7208 && host_integerp (arg11, 0))
7210 HOST_WIDE_INT int01, int11, tmp;
7211 bool swap = false;
7212 tree maybe_same;
7213 int01 = TREE_INT_CST_LOW (arg01);
7214 int11 = TREE_INT_CST_LOW (arg11);
7216 /* Move min of absolute values to int11. */
7217 if (absu_hwi (int01) < absu_hwi (int11))
7219 tmp = int01, int01 = int11, int11 = tmp;
7220 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7221 maybe_same = arg01;
7222 swap = true;
7224 else
7225 maybe_same = arg11;
7227 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7228 /* The remainder should not be a constant, otherwise we
7229 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7230 increase the number of multiplications needed. */
7231 && TREE_CODE (arg10) != INTEGER_CST)
7233 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7234 build_int_cst (TREE_TYPE (arg00),
7235 int01 / int11));
7236 alt1 = arg10;
7237 same = maybe_same;
7238 if (swap)
7239 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7243 if (same)
7244 return fold_build2_loc (loc, MULT_EXPR, type,
7245 fold_build2_loc (loc, code, type,
7246 fold_convert_loc (loc, type, alt0),
7247 fold_convert_loc (loc, type, alt1)),
7248 fold_convert_loc (loc, type, same));
7250 return NULL_TREE;
7253 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7254 specified by EXPR into the buffer PTR of length LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero
7256 upon failure. */
7258 static int
7259 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7261 tree type = TREE_TYPE (expr);
7262 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7263 int byte, offset, word, words;
7264 unsigned char value;
7266 if (total_bytes > len)
7267 return 0;
7268 words = total_bytes / UNITS_PER_WORD;
7270 for (byte = 0; byte < total_bytes; byte++)
7272 int bitpos = byte * BITS_PER_UNIT;
7273 if (bitpos < HOST_BITS_PER_WIDE_INT)
7274 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7275 else
7276 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7277 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7279 if (total_bytes > UNITS_PER_WORD)
7281 word = byte / UNITS_PER_WORD;
7282 if (WORDS_BIG_ENDIAN)
7283 word = (words - 1) - word;
7284 offset = word * UNITS_PER_WORD;
7285 if (BYTES_BIG_ENDIAN)
7286 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7287 else
7288 offset += byte % UNITS_PER_WORD;
7290 else
7291 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
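/* E.g. encoding the 32-bit constant 0x11223344 stores the bytes
44 33 22 11 at offsets 0-3 on a little-endian target, and
11 22 33 44 on a big-endian one. */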
7292 ptr[offset] = value;
7294 return total_bytes;
7298 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7299 specified by EXPR into the buffer PTR of length LEN bytes.
7300 Return the number of bytes placed in the buffer, or zero
7301 upon failure. */
7303 static int
7304 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7306 tree type = TREE_TYPE (expr);
7307 enum machine_mode mode = TYPE_MODE (type);
7308 int total_bytes = GET_MODE_SIZE (mode);
7309 FIXED_VALUE_TYPE value;
7310 tree i_value, i_type;
7312 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7313 return 0;
7315 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7317 if (NULL_TREE == i_type
7318 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7319 return 0;
7321 value = TREE_FIXED_CST (expr);
7322 i_value = double_int_to_tree (i_type, value.data);
7324 return native_encode_int (i_value, ptr, len);
7328 /* Subroutine of native_encode_expr. Encode the REAL_CST
7329 specified by EXPR into the buffer PTR of length LEN bytes.
7330 Return the number of bytes placed in the buffer, or zero
7331 upon failure. */
7333 static int
7334 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7336 tree type = TREE_TYPE (expr);
7337 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7338 int byte, offset, word, words, bitpos;
7339 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the host's long. We handle floating point representations with
7343 up to 192 bits. */
7344 long tmp[6];
7346 if (total_bytes > len)
7347 return 0;
7348 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7350 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7352 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7353 bitpos += BITS_PER_UNIT)
7355 byte = (bitpos / BITS_PER_UNIT) & 3;
7356 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7358 if (UNITS_PER_WORD < 4)
7360 word = byte / UNITS_PER_WORD;
7361 if (WORDS_BIG_ENDIAN)
7362 word = (words - 1) - word;
7363 offset = word * UNITS_PER_WORD;
7364 if (BYTES_BIG_ENDIAN)
7365 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 else
7367 offset += byte % UNITS_PER_WORD;
7369 else
7370 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7371 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7373 return total_bytes;
7376 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7377 specified by EXPR into the buffer PTR of length LEN bytes.
7378 Return the number of bytes placed in the buffer, or zero
7379 upon failure. */
7381 static int
7382 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7384 int rsize, isize;
7385 tree part;
7387 part = TREE_REALPART (expr);
7388 rsize = native_encode_expr (part, ptr, len);
7389 if (rsize == 0)
7390 return 0;
7391 part = TREE_IMAGPART (expr);
7392 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7393 if (isize != rsize)
7394 return 0;
7395 return rsize + isize;
7399 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7400 specified by EXPR into the buffer PTR of length LEN bytes.
7401 Return the number of bytes placed in the buffer, or zero
7402 upon failure. */
7404 static int
7405 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7407 unsigned i, count;
7408 int size, offset;
7409 tree itype, elem;
7411 offset = 0;
7412 count = VECTOR_CST_NELTS (expr);
7413 itype = TREE_TYPE (TREE_TYPE (expr));
7414 size = GET_MODE_SIZE (TYPE_MODE (itype));
7415 for (i = 0; i < count; i++)
7417 elem = VECTOR_CST_ELT (expr, i);
7418 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7419 return 0;
7420 offset += size;
7422 return offset;
7426 /* Subroutine of native_encode_expr. Encode the STRING_CST
7427 specified by EXPR into the buffer PTR of length LEN bytes.
7428 Return the number of bytes placed in the buffer, or zero
7429 upon failure. */
7431 static int
7432 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7434 tree type = TREE_TYPE (expr);
7435 HOST_WIDE_INT total_bytes;
7437 if (TREE_CODE (type) != ARRAY_TYPE
7438 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7439 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7440 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7441 return 0;
7442 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7443 if (total_bytes > len)
7444 return 0;
7445 if (TREE_STRING_LENGTH (expr) < total_bytes)
7447 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7448 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7449 total_bytes - TREE_STRING_LENGTH (expr));
7451 else
7452 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7453 return total_bytes;
7457 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7458 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7459 by EXPR into the buffer PTR of length LEN bytes. Return the number of bytes
7460 placed in the buffer, or zero upon failure. */
7462 int
7463 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7465 switch (TREE_CODE (expr))
7467 case INTEGER_CST:
7468 return native_encode_int (expr, ptr, len);
7470 case REAL_CST:
7471 return native_encode_real (expr, ptr, len);
7473 case FIXED_CST:
7474 return native_encode_fixed (expr, ptr, len);
7476 case COMPLEX_CST:
7477 return native_encode_complex (expr, ptr, len);
7479 case VECTOR_CST:
7480 return native_encode_vector (expr, ptr, len);
7482 case STRING_CST:
7483 return native_encode_string (expr, ptr, len);
7485 default:
7486 return 0;
7491 /* Subroutine of native_interpret_expr. Interpret the contents of
7492 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7493 If the buffer cannot be interpreted, return NULL_TREE. */
7495 static tree
7496 native_interpret_int (tree type, const unsigned char *ptr, int len)
7498 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7499 double_int result;
7501 if (total_bytes > len
7502 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7503 return NULL_TREE;
7505 result = double_int::from_buffer (ptr, total_bytes);
7507 return double_int_to_tree (type, result);
7511 /* Subroutine of native_interpret_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7513 If the buffer cannot be interpreted, return NULL_TREE. */
7515 static tree
7516 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7518 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7519 double_int result;
7520 FIXED_VALUE_TYPE fixed_value;
7522 if (total_bytes > len
7523 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7524 return NULL_TREE;
7526 result = double_int::from_buffer (ptr, total_bytes);
7527 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7529 return build_fixed (type, fixed_value);
7533 /* Subroutine of native_interpret_expr. Interpret the contents of
7534 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7535 If the buffer cannot be interpreted, return NULL_TREE. */
7537 static tree
7538 native_interpret_real (tree type, const unsigned char *ptr, int len)
7540 enum machine_mode mode = TYPE_MODE (type);
7541 int total_bytes = GET_MODE_SIZE (mode);
7542 int byte, offset, word, words, bitpos;
7543 unsigned char value;
7544 /* There are always 32 bits in each long, no matter the size of
7545 the host's long. We handle floating point representations with
7546 up to 192 bits. */
7547 REAL_VALUE_TYPE r;
7548 long tmp[6];
7550 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7551 if (total_bytes > len || total_bytes > 24)
7552 return NULL_TREE;
7553 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7555 memset (tmp, 0, sizeof (tmp));
7556 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7557 bitpos += BITS_PER_UNIT)
7559 byte = (bitpos / BITS_PER_UNIT) & 3;
7560 if (UNITS_PER_WORD < 4)
7562 word = byte / UNITS_PER_WORD;
7563 if (WORDS_BIG_ENDIAN)
7564 word = (words - 1) - word;
7565 offset = word * UNITS_PER_WORD;
7566 if (BYTES_BIG_ENDIAN)
7567 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7568 else
7569 offset += byte % UNITS_PER_WORD;
7571 else
7572 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7573 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7575 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7578 real_from_target (&r, tmp, mode);
7579 return build_real (type, r);
7583 /* Subroutine of native_interpret_expr. Interpret the contents of
7584 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7585 If the buffer cannot be interpreted, return NULL_TREE. */
7587 static tree
7588 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7590 tree etype, rpart, ipart;
7591 int size;
7593 etype = TREE_TYPE (type);
7594 size = GET_MODE_SIZE (TYPE_MODE (etype));
7595 if (size * 2 > len)
7596 return NULL_TREE;
7597 rpart = native_interpret_expr (etype, ptr, size);
7598 if (!rpart)
7599 return NULL_TREE;
7600 ipart = native_interpret_expr (etype, ptr+size, size);
7601 if (!ipart)
7602 return NULL_TREE;
7603 return build_complex (type, rpart, ipart);
7607 /* Subroutine of native_interpret_expr. Interpret the contents of
7608 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7609 If the buffer cannot be interpreted, return NULL_TREE. */
7611 static tree
7612 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7614 tree etype, elem;
7615 int i, size, count;
7616 tree *elements;
7618 etype = TREE_TYPE (type);
7619 size = GET_MODE_SIZE (TYPE_MODE (etype));
7620 count = TYPE_VECTOR_SUBPARTS (type);
7621 if (size * count > len)
7622 return NULL_TREE;
7624 elements = XALLOCAVEC (tree, count);
7625 for (i = count - 1; i >= 0; i--)
7627 elem = native_interpret_expr (etype, ptr+(i*size), size);
7628 if (!elem)
7629 return NULL_TREE;
7630 elements[i] = elem;
7632 return build_vector (type, elements);
7636 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7637 the buffer PTR of length LEN as a constant of type TYPE. For
7638 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7639 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7640 return NULL_TREE. */
7642 tree
7643 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7645 switch (TREE_CODE (type))
7647 case INTEGER_TYPE:
7648 case ENUMERAL_TYPE:
7649 case BOOLEAN_TYPE:
7650 case POINTER_TYPE:
7651 case REFERENCE_TYPE:
7652 return native_interpret_int (type, ptr, len);
7654 case REAL_TYPE:
7655 return native_interpret_real (type, ptr, len);
7657 case FIXED_POINT_TYPE:
7658 return native_interpret_fixed (type, ptr, len);
7660 case COMPLEX_TYPE:
7661 return native_interpret_complex (type, ptr, len);
7663 case VECTOR_TYPE:
7664 return native_interpret_vector (type, ptr, len);
7666 default:
7667 return NULL_TREE;
7671 /* Returns true if we can interpret the contents of a native encoding
7672 as TYPE. */
7674 static bool
7675 can_native_interpret_type_p (tree type)
7677 switch (TREE_CODE (type))
7679 case INTEGER_TYPE:
7680 case ENUMERAL_TYPE:
7681 case BOOLEAN_TYPE:
7682 case POINTER_TYPE:
7683 case REFERENCE_TYPE:
7684 case FIXED_POINT_TYPE:
7685 case REAL_TYPE:
7686 case COMPLEX_TYPE:
7687 case VECTOR_TYPE:
7688 return true;
7689 default:
7690 return false;
7694 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7695 TYPE at compile-time. If we're unable to perform the conversion
7696 return NULL_TREE. */
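/* For example, on a target with 32-bit IEEE single floats,
VIEW_CONVERT_EXPR<int>(1.0f) folds to the integer constant with
bit pattern 0x3f800000. */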
7698 static tree
7699 fold_view_convert_expr (tree type, tree expr)
7701 /* We support up to 512-bit values (for V8DFmode). */
7702 unsigned char buffer[64];
7703 int len;
7705 /* Check that the host and target are sane. */
7706 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7707 return NULL_TREE;
7709 len = native_encode_expr (expr, buffer, sizeof (buffer));
7710 if (len == 0)
7711 return NULL_TREE;
7713 return native_interpret_expr (type, buffer, len);
7716 /* Build an expression for the address of T. Folds away INDIRECT_REF
7717 to avoid confusing the gimplify process. */
7719 tree
7720 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7722 /* The size of the object is not relevant when talking about its address. */
7723 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7724 t = TREE_OPERAND (t, 0);
7726 if (TREE_CODE (t) == INDIRECT_REF)
7728 t = TREE_OPERAND (t, 0);
7730 if (TREE_TYPE (t) != ptrtype)
7731 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7733 else if (TREE_CODE (t) == MEM_REF
7734 && integer_zerop (TREE_OPERAND (t, 1)))
7735 return TREE_OPERAND (t, 0);
7736 else if (TREE_CODE (t) == MEM_REF
7737 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7738 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7739 TREE_OPERAND (t, 0),
7740 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7741 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7743 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7745 if (TREE_TYPE (t) != ptrtype)
7746 t = fold_convert_loc (loc, ptrtype, t);
7748 else
7749 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7751 return t;
7754 /* Build an expression for the address of T. */
7756 tree
7757 build_fold_addr_expr_loc (location_t loc, tree t)
7759 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7761 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7764 static bool vec_cst_ctor_to_array (tree, tree *);
7766 /* Fold a unary expression of code CODE and type TYPE with operand
7767 OP0. Return the folded expression if folding is successful.
7768 Otherwise, return NULL_TREE. */
7770 tree
7771 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7773 tree tem;
7774 tree arg0;
7775 enum tree_code_class kind = TREE_CODE_CLASS (code);
7777 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7778 && TREE_CODE_LENGTH (code) == 1);
7780 arg0 = op0;
7781 if (arg0)
7783 if (CONVERT_EXPR_CODE_P (code)
7784 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7786 /* Don't use STRIP_NOPS, because signedness of argument type
7787 matters. */
7788 STRIP_SIGN_NOPS (arg0);
7790 else
7792 /* Strip any conversions that don't change the mode. This
7793 is safe for every expression, except for a comparison
7794 expression because its signedness is derived from its
7795 operands.
7797 Note that this is done as an internal manipulation within
7798 the constant folder, in order to find the simplest
7799 representation of the arguments so that their form can be
7800 studied. In any cases, the appropriate type conversions
7801 should be put back in the tree that will get out of the
7802 constant folder. */
7803 STRIP_NOPS (arg0);
7807 if (TREE_CODE_CLASS (code) == tcc_unary)
7809 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7810 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7811 fold_build1_loc (loc, code, type,
7812 fold_convert_loc (loc, TREE_TYPE (op0),
7813 TREE_OPERAND (arg0, 1))));
7814 else if (TREE_CODE (arg0) == COND_EXPR)
7816 tree arg01 = TREE_OPERAND (arg0, 1);
7817 tree arg02 = TREE_OPERAND (arg0, 2);
7818 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7819 arg01 = fold_build1_loc (loc, code, type,
7820 fold_convert_loc (loc,
7821 TREE_TYPE (op0), arg01));
7822 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7823 arg02 = fold_build1_loc (loc, code, type,
7824 fold_convert_loc (loc,
7825 TREE_TYPE (op0), arg02));
7826 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7827 arg01, arg02);
7829 /* If this was a conversion, and all we did was to move it
7830 inside the COND_EXPR, bring it back out. But leave it if
7831 it is a conversion from integer to integer and the
7832 result precision is no wider than a word since such a
7833 conversion is cheap and may be optimized away by combine,
7834 while it couldn't if it were outside the COND_EXPR. Then return
7835 so we don't get into an infinite recursion loop taking the
7836 conversion out and then back in. */
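/* E.g. (short)(c ? i : j) with int operands stays as
c ? (short)i : (short)j, since an integer-to-integer conversion
no wider than a word is cheap inside the arms. */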
7838 if ((CONVERT_EXPR_CODE_P (code)
7839 || code == NON_LVALUE_EXPR)
7840 && TREE_CODE (tem) == COND_EXPR
7841 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7842 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7843 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7844 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7845 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7846 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7847 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7848 && (INTEGRAL_TYPE_P
7849 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7850 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7851 || flag_syntax_only))
7852 tem = build1_loc (loc, code, type,
7853 build3 (COND_EXPR,
7854 TREE_TYPE (TREE_OPERAND
7855 (TREE_OPERAND (tem, 1), 0)),
7856 TREE_OPERAND (tem, 0),
7857 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7858 TREE_OPERAND (TREE_OPERAND (tem, 2),
7859 0)));
7860 return tem;
7864 switch (code)
7866 case PAREN_EXPR:
7867 /* Re-association barriers around constants and other re-association
7868 barriers can be removed. */
7869 if (CONSTANT_CLASS_P (op0)
7870 || TREE_CODE (op0) == PAREN_EXPR)
7871 return fold_convert_loc (loc, type, op0);
7872 return NULL_TREE;
7874 CASE_CONVERT:
7875 case FLOAT_EXPR:
7876 case FIX_TRUNC_EXPR:
7877 if (TREE_TYPE (op0) == type)
7878 return op0;
7880 if (COMPARISON_CLASS_P (op0))
7882 /* If we have (type) (a CMP b) and type is an integral type, return
7883 new expression involving the new type. Canonicalize
7884 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7885 non-integral type.
7886 Do not fold the result as that would not simplify further, also
7887 folding again results in recursions. */
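/* E.g. (double)(a < b) becomes (a < b) ? 1.0 : 0.0, while a cast
to another boolean type just re-types the comparison. */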
7888 if (TREE_CODE (type) == BOOLEAN_TYPE)
7889 return build2_loc (loc, TREE_CODE (op0), type,
7890 TREE_OPERAND (op0, 0),
7891 TREE_OPERAND (op0, 1));
7892 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7893 && TREE_CODE (type) != VECTOR_TYPE)
7894 return build3_loc (loc, COND_EXPR, type, op0,
7895 constant_boolean_node (true, type),
7896 constant_boolean_node (false, type));
7899 /* Handle cases of two conversions in a row. */
7900 if (CONVERT_EXPR_P (op0))
7902 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7903 tree inter_type = TREE_TYPE (op0);
7904 int inside_int = INTEGRAL_TYPE_P (inside_type);
7905 int inside_ptr = POINTER_TYPE_P (inside_type);
7906 int inside_float = FLOAT_TYPE_P (inside_type);
7907 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7908 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7909 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7910 int inter_int = INTEGRAL_TYPE_P (inter_type);
7911 int inter_ptr = POINTER_TYPE_P (inter_type);
7912 int inter_float = FLOAT_TYPE_P (inter_type);
7913 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7914 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7915 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7916 int final_int = INTEGRAL_TYPE_P (type);
7917 int final_ptr = POINTER_TYPE_P (type);
7918 int final_float = FLOAT_TYPE_P (type);
7919 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7920 unsigned int final_prec = TYPE_PRECISION (type);
7921 int final_unsignedp = TYPE_UNSIGNED (type);
7923 /* In addition to the cases of two conversions in a row
7924 handled below, if we are converting something to its own
7925 type via an object of identical or wider precision, neither
7926 conversion is needed. */
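/* E.g. with 32-bit int and 64-bit long, (int)(long)i reduces to
(int)i, and hence to i. */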
7927 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7928 && (((inter_int || inter_ptr) && final_int)
7929 || (inter_float && final_float))
7930 && inter_prec >= final_prec)
7931 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7933 /* Likewise, if the intermediate and initial types are either both
7934 float or both integer, we don't need the middle conversion if the
7935 former is wider than the latter and doesn't change the signedness
7936 (for integers). Avoid this if the final type is a pointer since
7937 then we sometimes need the middle conversion. Likewise if the
7938 final type has a precision not equal to the size of its mode. */
7939 if (((inter_int && inside_int)
7940 || (inter_float && inside_float)
7941 || (inter_vec && inside_vec))
7942 && inter_prec >= inside_prec
7943 && (inter_float || inter_vec
7944 || inter_unsignedp == inside_unsignedp)
7945 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7946 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7947 && ! final_ptr
7948 && (! final_vec || inter_prec == inside_prec))
7949 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7951 /* If we have a sign-extension of a zero-extended value, we can
7952 replace that by a single zero-extension. Likewise if the
7953 final conversion does not change precision we can drop the
7954 intermediate conversion. */
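/* E.g. (int)(short)(unsigned char)c drops the intermediate cast:
zero-extending to short and then sign-extending to int yields the
same value as zero-extending straight to int. */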
7955 if (inside_int && inter_int && final_int
7956 && ((inside_prec < inter_prec && inter_prec < final_prec
7957 && inside_unsignedp && !inter_unsignedp)
7958 || final_prec == inter_prec))
7959 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7961 /* Two conversions in a row are not needed unless:
7962 - some conversion is floating-point (overstrict for now), or
7963 - some conversion is a vector (overstrict for now), or
7964 - the intermediate type is narrower than both initial and
7965 final, or
7966 - the intermediate type and innermost type differ in signedness,
7967 and the outermost type is wider than the intermediate, or
7968 - the initial type is a pointer type and the precisions of the
7969 intermediate and final types differ, or
7970 - the final type is a pointer type and the precisions of the
7971 initial and intermediate types differ. */
7972 if (! inside_float && ! inter_float && ! final_float
7973 && ! inside_vec && ! inter_vec && ! final_vec
7974 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7975 && ! (inside_int && inter_int
7976 && inter_unsignedp != inside_unsignedp
7977 && inter_prec < final_prec)
7978 && ((inter_unsignedp && inter_prec > inside_prec)
7979 == (final_unsignedp && final_prec > inter_prec))
7980 && ! (inside_ptr && inter_prec != final_prec)
7981 && ! (final_ptr && inside_prec != inter_prec)
7982 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7983 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7984 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7987 /* Handle (T *)&A.B.C for A being of type T and B and C
7988 living at offset zero. This occurs frequently in
7989 C++ upcasting and then accessing the base. */
7990 if (TREE_CODE (op0) == ADDR_EXPR
7991 && POINTER_TYPE_P (type)
7992 && handled_component_p (TREE_OPERAND (op0, 0)))
7994 HOST_WIDE_INT bitsize, bitpos;
7995 tree offset;
7996 enum machine_mode mode;
7997 int unsignedp, volatilep;
7998 tree base = TREE_OPERAND (op0, 0);
7999 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8000 &mode, &unsignedp, &volatilep, false);
8001 /* If the reference was to a (constant) zero offset, we can use
8002 the address of the base if it has the same base type
8003 as the result type and the pointer type is unqualified. */
8004 if (! offset && bitpos == 0
8005 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8006 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8007 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8008 return fold_convert_loc (loc, type,
8009 build_fold_addr_expr_loc (loc, base));
8012 if (TREE_CODE (op0) == MODIFY_EXPR
8013 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8014 /* Detect assigning a bitfield. */
8015 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8016 && DECL_BIT_FIELD
8017 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8019 /* Don't leave an assignment inside a conversion
8020 unless assigning a bitfield. */
8021 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8022 /* First do the assignment, then return converted constant. */
8023 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8024 TREE_NO_WARNING (tem) = 1;
8025 TREE_USED (tem) = 1;
8026 return tem;
8029 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8030 constant (if x has signed type, the sign bit cannot be set
8031 in c). This folds extension into the BIT_AND_EXPR.
8032 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8033 very likely don't have maximal range for their precision and this
8034 transformation effectively doesn't preserve non-maximal ranges. */
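/* E.g. (unsigned long)(x & 0xff) with unsigned int x becomes
(unsigned long)x & 0xff, folding the widening into the mask. */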
8035 if (TREE_CODE (type) == INTEGER_TYPE
8036 && TREE_CODE (op0) == BIT_AND_EXPR
8037 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8039 tree and_expr = op0;
8040 tree and0 = TREE_OPERAND (and_expr, 0);
8041 tree and1 = TREE_OPERAND (and_expr, 1);
8042 int change = 0;
8044 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8045 || (TYPE_PRECISION (type)
8046 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8047 change = 1;
8048 else if (TYPE_PRECISION (TREE_TYPE (and1))
8049 <= HOST_BITS_PER_WIDE_INT
8050 && host_integerp (and1, 1))
8052 unsigned HOST_WIDE_INT cst;
8054 cst = tree_low_cst (and1, 1);
8055 cst &= (HOST_WIDE_INT) -1
8056 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8057 change = (cst == 0);
8058 #ifdef LOAD_EXTEND_OP
8059 if (change
8060 && !flag_syntax_only
8061 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8062 == ZERO_EXTEND))
8064 tree uns = unsigned_type_for (TREE_TYPE (and0));
8065 and0 = fold_convert_loc (loc, uns, and0);
8066 and1 = fold_convert_loc (loc, uns, and1);
8068 #endif
8070 if (change)
8072 tem = force_fit_type_double (type, tree_to_double_int (and1),
8073 0, TREE_OVERFLOW (and1));
8074 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8075 fold_convert_loc (loc, type, and0), tem);
8079 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8080 when one of the new casts will fold away. Conservatively we assume
8081 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
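/* E.g. (char *)(p p+ 4) becomes (char *)p p+ 4; if P itself is
a conversion, the new inner cast can then fold away. */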
8082 if (POINTER_TYPE_P (type)
8083 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8084 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8085 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8086 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8087 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8089 tree arg00 = TREE_OPERAND (arg0, 0);
8090 tree arg01 = TREE_OPERAND (arg0, 1);
8092 return fold_build_pointer_plus_loc
8093 (loc, fold_convert_loc (loc, type, arg00), arg01);
8096 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8097 of the same precision, and X is an integer type not narrower than
8098 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
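/* E.g. with 32-bit int, (int)~(unsigned int)i becomes ~i. */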
8099 if (INTEGRAL_TYPE_P (type)
8100 && TREE_CODE (op0) == BIT_NOT_EXPR
8101 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8102 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8103 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8105 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8106 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8107 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8108 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8109 fold_convert_loc (loc, type, tem));
8112 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8113 type of X and Y (integer types only). */
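/* E.g. (short)(i * j) with int operands becomes
(short)((unsigned short)i * (unsigned short)j), using the unsigned
type so that no new overflow is introduced. */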
8114 if (INTEGRAL_TYPE_P (type)
8115 && TREE_CODE (op0) == MULT_EXPR
8116 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8117 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8119 /* Be careful not to introduce new overflows. */
8120 tree mult_type;
8121 if (TYPE_OVERFLOW_WRAPS (type))
8122 mult_type = type;
8123 else
8124 mult_type = unsigned_type_for (type);
8126 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8128 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8129 fold_convert_loc (loc, mult_type,
8130 TREE_OPERAND (op0, 0)),
8131 fold_convert_loc (loc, mult_type,
8132 TREE_OPERAND (op0, 1)));
8133 return fold_convert_loc (loc, type, tem);
8137 tem = fold_convert_const (code, type, op0);
8138 return tem ? tem : NULL_TREE;
8140 case ADDR_SPACE_CONVERT_EXPR:
8141 if (integer_zerop (arg0))
8142 return fold_convert_const (code, type, arg0);
8143 return NULL_TREE;
8145 case FIXED_CONVERT_EXPR:
8146 tem = fold_convert_const (code, type, arg0);
8147 return tem ? tem : NULL_TREE;
8149 case VIEW_CONVERT_EXPR:
8150 if (TREE_TYPE (op0) == type)
8151 return op0;
8152 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8153 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8154 type, TREE_OPERAND (op0, 0));
8155 if (TREE_CODE (op0) == MEM_REF)
8156 return fold_build2_loc (loc, MEM_REF, type,
8157 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8159 /* For integral conversions with the same precision or pointer
8160 conversions use a NOP_EXPR instead. */
8161 if ((INTEGRAL_TYPE_P (type)
8162 || POINTER_TYPE_P (type))
8163 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8164 || POINTER_TYPE_P (TREE_TYPE (op0)))
8165 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8166 return fold_convert_loc (loc, type, op0);
8168 /* Strip inner integral conversions that do not change the precision. */
8169 if (CONVERT_EXPR_P (op0)
8170 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8171 || POINTER_TYPE_P (TREE_TYPE (op0)))
8172 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8173 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8174 && (TYPE_PRECISION (TREE_TYPE (op0))
8175 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8176 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8177 type, TREE_OPERAND (op0, 0));
8179 return fold_view_convert_expr (type, op0);
8181 case NEGATE_EXPR:
8182 tem = fold_negate_expr (loc, arg0);
8183 if (tem)
8184 return fold_convert_loc (loc, type, tem);
8185 return NULL_TREE;
8187 case ABS_EXPR:
8188 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8189 return fold_abs_const (arg0, type);
8190 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8191 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8192 /* Convert fabs((double)float) into (double)fabsf(float). */
8193 else if (TREE_CODE (arg0) == NOP_EXPR
8194 && TREE_CODE (type) == REAL_TYPE)
8196 tree targ0 = strip_float_extensions (arg0);
8197 if (targ0 != arg0)
8198 return fold_convert_loc (loc, type,
8199 fold_build1_loc (loc, ABS_EXPR,
8200 TREE_TYPE (targ0),
8201 targ0));
8203 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8204 else if (TREE_CODE (arg0) == ABS_EXPR)
8205 return arg0;
8206 else if (tree_expr_nonnegative_p (arg0))
8207 return arg0;
8209 /* Strip sign ops from argument. */
8210 if (TREE_CODE (type) == REAL_TYPE)
8212 tem = fold_strip_sign_ops (arg0);
8213 if (tem)
8214 return fold_build1_loc (loc, ABS_EXPR, type,
8215 fold_convert_loc (loc, type, tem));
8217 return NULL_TREE;
8219 case CONJ_EXPR:
8220 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8221 return fold_convert_loc (loc, type, arg0);
8222 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8224 tree itype = TREE_TYPE (type);
8225 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8226 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8227 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8228 negate_expr (ipart));
8230 if (TREE_CODE (arg0) == COMPLEX_CST)
8232 tree itype = TREE_TYPE (type);
8233 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8234 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8235 return build_complex (type, rpart, negate_expr (ipart));
8237 if (TREE_CODE (arg0) == CONJ_EXPR)
8238 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8239 return NULL_TREE;
8241 case BIT_NOT_EXPR:
8242 if (TREE_CODE (arg0) == INTEGER_CST)
8243 return fold_not_const (arg0, type);
8244 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8245 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8246 /* Convert ~ (-A) to A - 1. */
8247 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8248 return fold_build2_loc (loc, MINUS_EXPR, type,
8249 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8250 build_int_cst (type, 1));
8251 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8252 else if (INTEGRAL_TYPE_P (type)
8253 && ((TREE_CODE (arg0) == MINUS_EXPR
8254 && integer_onep (TREE_OPERAND (arg0, 1)))
8255 || (TREE_CODE (arg0) == PLUS_EXPR
8256 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8257 return fold_build1_loc (loc, NEGATE_EXPR, type,
8258 fold_convert_loc (loc, type,
8259 TREE_OPERAND (arg0, 0)));
8260 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8261 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8262 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8263 fold_convert_loc (loc, type,
8264 TREE_OPERAND (arg0, 0)))))
8265 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8266 fold_convert_loc (loc, type,
8267 TREE_OPERAND (arg0, 1)));
8268 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8269 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8270 fold_convert_loc (loc, type,
8271 TREE_OPERAND (arg0, 1)))))
8272 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8273 fold_convert_loc (loc, type,
8274 TREE_OPERAND (arg0, 0)), tem);
8275 /* Perform BIT_NOT_EXPR on each element individually. */
8276 else if (TREE_CODE (arg0) == VECTOR_CST)
8278 tree *elements;
8279 tree elem;
8280 unsigned count = VECTOR_CST_NELTS (arg0), i;
8282 elements = XALLOCAVEC (tree, count);
8283 for (i = 0; i < count; i++)
8285 elem = VECTOR_CST_ELT (arg0, i);
8286 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8287 if (elem == NULL_TREE)
8288 break;
8289 elements[i] = elem;
8291 if (i == count)
8292 return build_vector (type, elements);
8294 else if (COMPARISON_CLASS_P (arg0)
8295 && (VECTOR_TYPE_P (type)
8296 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8298 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8299 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8300 HONOR_NANS (TYPE_MODE (op_type)));
8301 if (subcode != ERROR_MARK)
8302 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8303 TREE_OPERAND (arg0, 1));
8307 return NULL_TREE;
8309 case TRUTH_NOT_EXPR:
8310 /* Note that the operand of this must be an int
8311 and its values must be 0 or 1.
8312 ("true" is a fixed value perhaps depending on the language,
8313 but we don't handle values other than 1 correctly yet.) */
8314 tem = fold_truth_not_expr (loc, arg0);
8315 if (!tem)
8316 return NULL_TREE;
8317 return fold_convert_loc (loc, type, tem);
8319 case REALPART_EXPR:
8320 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8321 return fold_convert_loc (loc, type, arg0);
8322 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8323 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8324 TREE_OPERAND (arg0, 1));
8325 if (TREE_CODE (arg0) == COMPLEX_CST)
8326 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8327 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8329 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8330 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8331 fold_build1_loc (loc, REALPART_EXPR, itype,
8332 TREE_OPERAND (arg0, 0)),
8333 fold_build1_loc (loc, REALPART_EXPR, itype,
8334 TREE_OPERAND (arg0, 1)));
8335 return fold_convert_loc (loc, type, tem);
8337 if (TREE_CODE (arg0) == CONJ_EXPR)
8339 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8340 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8341 TREE_OPERAND (arg0, 0));
8342 return fold_convert_loc (loc, type, tem);
8344 if (TREE_CODE (arg0) == CALL_EXPR)
8346 tree fn = get_callee_fndecl (arg0);
8347 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8348 switch (DECL_FUNCTION_CODE (fn))
8350 CASE_FLT_FN (BUILT_IN_CEXPI):
8351 fn = mathfn_built_in (type, BUILT_IN_COS);
8352 if (fn)
8353 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8354 break;
8356 default:
8357 break;
8360 return NULL_TREE;
8362 case IMAGPART_EXPR:
8363 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8364 return build_zero_cst (type);
8365 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8366 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8367 TREE_OPERAND (arg0, 0));
8368 if (TREE_CODE (arg0) == COMPLEX_CST)
8369 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8370 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8372 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8373 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8374 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8375 TREE_OPERAND (arg0, 0)),
8376 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8377 TREE_OPERAND (arg0, 1)));
8378 return fold_convert_loc (loc, type, tem);
8380 if (TREE_CODE (arg0) == CONJ_EXPR)
8382 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8383 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8384 return fold_convert_loc (loc, type, negate_expr (tem));
8386 if (TREE_CODE (arg0) == CALL_EXPR)
8388 tree fn = get_callee_fndecl (arg0);
8389 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8390 switch (DECL_FUNCTION_CODE (fn))
8392 CASE_FLT_FN (BUILT_IN_CEXPI):
8393 fn = mathfn_built_in (type, BUILT_IN_SIN);
8394 if (fn)
8395 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8396 break;
8398 default:
8399 break;
8402 return NULL_TREE;
8404 case INDIRECT_REF:
8405 /* Fold *&X to X if X is an lvalue. */
8406 if (TREE_CODE (op0) == ADDR_EXPR)
8408 tree op00 = TREE_OPERAND (op0, 0);
8409 if ((TREE_CODE (op00) == VAR_DECL
8410 || TREE_CODE (op00) == PARM_DECL
8411 || TREE_CODE (op00) == RESULT_DECL)
8412 && !TREE_READONLY (op00))
8413 return op00;
8415 return NULL_TREE;
8417 case VEC_UNPACK_LO_EXPR:
8418 case VEC_UNPACK_HI_EXPR:
8419 case VEC_UNPACK_FLOAT_LO_EXPR:
8420 case VEC_UNPACK_FLOAT_HI_EXPR:
8422 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8423 tree *elts;
8424 enum tree_code subcode;
8426 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8427 if (TREE_CODE (arg0) != VECTOR_CST)
8428 return NULL_TREE;
8430 elts = XALLOCAVEC (tree, nelts * 2);
8431 if (!vec_cst_ctor_to_array (arg0, elts))
8432 return NULL_TREE;
8434 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8435 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8436 elts += nelts;
8438 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8439 subcode = NOP_EXPR;
8440 else
8441 subcode = FLOAT_EXPR;
8443 for (i = 0; i < nelts; i++)
8445 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8446 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8447 return NULL_TREE;
8450 return build_vector (type, elts);
8453 case REDUC_MIN_EXPR:
8454 case REDUC_MAX_EXPR:
8455 case REDUC_PLUS_EXPR:
8457 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8458 tree *elts;
8459 enum tree_code subcode;
8461 if (TREE_CODE (op0) != VECTOR_CST)
8462 return NULL_TREE;
8464 elts = XALLOCAVEC (tree, nelts);
8465 if (!vec_cst_ctor_to_array (op0, elts))
8466 return NULL_TREE;
8468 switch (code)
8470 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8471 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8472 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8473 default: gcc_unreachable ();
8476 for (i = 1; i < nelts; i++)
8478 elts[0] = const_binop (subcode, elts[0], elts[i]);
8479 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8480 return NULL_TREE;
8481 elts[i] = build_zero_cst (TREE_TYPE (type));
8484 return build_vector (type, elts);
8487 default:
8488 return NULL_TREE;
8489 } /* switch (code) */
8493 /* If the operation was a conversion do _not_ mark a resulting constant
8494 with TREE_OVERFLOW if the original constant was not. These conversions
8495 have implementation defined behavior and retaining the TREE_OVERFLOW
8496 flag here would confuse later passes such as VRP. */
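/* E.g. with an 8-bit signed char, folding (signed char) 300 yields 44;
TREE_OVERFLOW stays clear because the operand constant 300 itself
did not overflow. */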
8497 tree
8498 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8499 tree type, tree op0)
8501 tree res = fold_unary_loc (loc, code, type, op0);
8502 if (res
8503 && TREE_CODE (res) == INTEGER_CST
8504 && TREE_CODE (op0) == INTEGER_CST
8505 && CONVERT_EXPR_CODE_P (code))
8506 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8508 return res;
8511 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8512 operands OP0 and OP1. LOC is the location of the resulting expression.
8513 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8514 Return the folded expression if folding is successful. Otherwise,
8515 return NULL_TREE. */
8516 static tree
8517 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8518 tree arg0, tree arg1, tree op0, tree op1)
8520 tree tem;
8522 /* We only do these simplifications if we are optimizing. */
8523 if (!optimize)
8524 return NULL_TREE;
8526 /* Check for things like (A || B) && (A || C). We can convert this
8527 to A || (B && C). Note that either operator can be any of the four
8528 truth and/or operations and the transformation will still be
8529 valid. Also note that we only care about order for the
8530 ANDIF and ORIF operators. If B contains side effects, this
8531 might change the truth-value of A. */
8532 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8533 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8534 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8535 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8536 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8539 tree a00 = TREE_OPERAND (arg0, 0);
8540 tree a01 = TREE_OPERAND (arg0, 1);
8541 tree a10 = TREE_OPERAND (arg1, 0);
8542 tree a11 = TREE_OPERAND (arg1, 1);
8543 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8544 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8545 && (code == TRUTH_AND_EXPR
8546 || code == TRUTH_OR_EXPR));
8548 if (operand_equal_p (a00, a10, 0))
8549 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8550 fold_build2_loc (loc, code, type, a01, a11));
8551 else if (commutative && operand_equal_p (a00, a11, 0))
8552 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8553 fold_build2_loc (loc, code, type, a01, a10));
8554 else if (commutative && operand_equal_p (a01, a10, 0))
8555 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8556 fold_build2_loc (loc, code, type, a00, a11));
8558 /* This case is tricky because we must either have commutative
8559 operators or else A10 must not have side-effects. */
8561 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8562 && operand_equal_p (a01, a11, 0))
8563 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8564 fold_build2_loc (loc, code, type, a00, a10),
8565 a01);
8568 /* See if we can build a range comparison. */
8569 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8570 return tem;
8572 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8573 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8575 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8576 if (tem)
8577 return fold_build2_loc (loc, code, type, tem, arg1);
8580 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8581 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8583 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8584 if (tem)
8585 return fold_build2_loc (loc, code, type, arg0, tem);
8588 /* Check for the possibility of merging component references. If our
8589 lhs is another similar operation, try to merge its rhs with our
8590 rhs. Then try to merge our lhs and rhs. */
8591 if (TREE_CODE (arg0) == code
8592 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8593 TREE_OPERAND (arg0, 1), arg1)))
8594 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8596 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8597 return tem;
8599 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8600 && (code == TRUTH_AND_EXPR
8601 || code == TRUTH_ANDIF_EXPR
8602 || code == TRUTH_OR_EXPR
8603 || code == TRUTH_ORIF_EXPR))
8605 enum tree_code ncode, icode;
8607 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8608 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8609 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8611 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8612 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8613 We don't want to pack more than two leafs to a non-IF AND/OR
8614 expression.
8615 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8616 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8617 If the inner right-hand side of the left-hand operand has
8618 side effects, or isn't simple, then we can't add to it,
8619 as otherwise we might destroy the if-sequence. */
8620 if (TREE_CODE (arg0) == icode
8621 && simple_operand_p_2 (arg1)
8622 /* Needed for sequence points to handle trappings, and
8623 side-effects. */
8624 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8626 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8627 arg1);
8628 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8629 tem);
8631 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8632 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8633 else if (TREE_CODE (arg1) == icode
8634 && simple_operand_p_2 (arg0)
8635 /* Needed for sequence points to handle trappings, and
8636 side-effects. */
8637 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8639 tem = fold_build2_loc (loc, ncode, type,
8640 arg0, TREE_OPERAND (arg1, 0));
8641 return fold_build2_loc (loc, icode, type, tem,
8642 TREE_OPERAND (arg1, 1));
8644 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8645 into (A OR B).
8646 For sequence point consistency, we need to check for trapping,
8647 and side-effects. */
8648 else if (code == icode && simple_operand_p_2 (arg0)
8649 && simple_operand_p_2 (arg1))
8650 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8653 return NULL_TREE;
8656 /* Fold a binary expression of code CODE and type TYPE with operands
8657 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8658 Return the folded expression if folding is successful. Otherwise,
8659 return NULL_TREE. */
8661 static tree
8662 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8664 enum tree_code compl_code;
8666 if (code == MIN_EXPR)
8667 compl_code = MAX_EXPR;
8668 else if (code == MAX_EXPR)
8669 compl_code = MIN_EXPR;
8670 else
8671 gcc_unreachable ();
8673 /* MIN (MAX (a, b), b) == b. */
8674 if (TREE_CODE (op0) == compl_code
8675 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8676 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8678 /* MIN (MAX (b, a), b) == b. */
8679 if (TREE_CODE (op0) == compl_code
8680 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8681 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8682 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8684 /* MIN (a, MAX (a, b)) == a. */
8685 if (TREE_CODE (op1) == compl_code
8686 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8687 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8688 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8690 /* MIN (a, MAX (b, a)) == a. */
8691 if (TREE_CODE (op1) == compl_code
8692 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8693 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8694 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8696 return NULL_TREE;
8699 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8700 by changing CODE to reduce the magnitude of constants involved in
8701 ARG0 of the comparison.
8702 Returns a canonicalized comparison tree if a simplification was
8703 possible, otherwise returns NULL_TREE.
8704 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8705 valid if signed overflow is undefined. */
8707 static tree
8708 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8709 tree arg0, tree arg1,
8710 bool *strict_overflow_p)
8712 enum tree_code code0 = TREE_CODE (arg0);
8713 tree t, cst0 = NULL_TREE;
8714 int sgn0;
8715 bool swap = false;
8717 /* Match A +- CST code arg1 and CST code arg1. We can change the
8718 first form only if overflow is undefined. */
8719 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8720 /* In principle pointers also have undefined overflow behavior,
8721 but that causes problems elsewhere. */
8722 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8723 && (code0 == MINUS_EXPR
8724 || code0 == PLUS_EXPR)
8725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8726 || code0 == INTEGER_CST))
8727 return NULL_TREE;
8729 /* Identify the constant in arg0 and its sign. */
8730 if (code0 == INTEGER_CST)
8731 cst0 = arg0;
8732 else
8733 cst0 = TREE_OPERAND (arg0, 1);
8734 sgn0 = tree_int_cst_sgn (cst0);
8736 /* Overflowed constants and zero will cause problems. */
8737 if (integer_zerop (cst0)
8738 || TREE_OVERFLOW (cst0))
8739 return NULL_TREE;
8741 /* See if we can reduce the magnitude of the constant in
8742 arg0 by changing the comparison code. */
8743 if (code0 == INTEGER_CST)
8745 /* CST <= arg1 -> CST-1 < arg1. */
8746 if (code == LE_EXPR && sgn0 == 1)
8747 code = LT_EXPR;
8748 /* -CST < arg1 -> -CST-1 <= arg1. */
8749 else if (code == LT_EXPR && sgn0 == -1)
8750 code = LE_EXPR;
8751 /* CST > arg1 -> CST-1 >= arg1. */
8752 else if (code == GT_EXPR && sgn0 == 1)
8753 code = GE_EXPR;
8754 /* -CST >= arg1 -> -CST-1 > arg1. */
8755 else if (code == GE_EXPR && sgn0 == -1)
8756 code = GT_EXPR;
8757 else
8758 return NULL_TREE;
8759 /* arg1 code' CST' might be more canonical. */
8760 swap = true;
8762 else
8764 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8765 if (code == LT_EXPR
8766 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8767 code = LE_EXPR;
8768 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8769 else if (code == GT_EXPR
8770 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8771 code = GE_EXPR;
8772 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8773 else if (code == LE_EXPR
8774 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8775 code = LT_EXPR;
8776 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8777 else if (code == GE_EXPR
8778 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8779 code = GT_EXPR;
8780 else
8781 return NULL_TREE;
8782 *strict_overflow_p = true;
8785 /* Now build the constant reduced in magnitude. But not if that
8786 would produce one outside of its type's range. */
8787 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8788 && ((sgn0 == 1
8789 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8790 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8791 || (sgn0 == -1
8792 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8793 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8794 /* We cannot swap the comparison here as that would cause us to
8795 endlessly recurse. */
8796 return NULL_TREE;
8798 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8799 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8800 if (code0 != INTEGER_CST)
8801 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8802 t = fold_convert (TREE_TYPE (arg1), t);
8804 /* If swapping might yield a more canonical form, do so. */
8805 if (swap)
8806 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8807 else
8808 return fold_build2_loc (loc, code, type, t, arg1);
8811 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8812 overflow further. Try to decrease the magnitude of constants involved
8813 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8814 and put sole constants at the second argument position.
8815 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8817 static tree
8818 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8819 tree arg0, tree arg1)
8821 tree t;
8822 bool strict_overflow_p;
8823 const char * const warnmsg = G_("assuming signed overflow does not occur "
8824 "when reducing constant in comparison");
8826 /* Try canonicalization by simplifying arg0. */
8827 strict_overflow_p = false;
8828 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8829 &strict_overflow_p);
8830 if (t)
8832 if (strict_overflow_p)
8833 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8834 return t;
8837 /* Try canonicalization by simplifying arg1 using the swapped
8838 comparison. */
8839 code = swap_tree_comparison (code);
8840 strict_overflow_p = false;
8841 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8842 &strict_overflow_p);
8843 if (t && strict_overflow_p)
8844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8845 return t;
8848 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8849 space. This is used to avoid issuing overflow warnings for
8850 expressions like &p->x which cannot wrap. */
8852 static bool
8853 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8855 double_int di_offset, total;
8857 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8858 return true;
8860 if (bitpos < 0)
8861 return true;
8863 if (offset == NULL_TREE)
8864 di_offset = double_int_zero;
8865 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8866 return true;
8867 else
8868 di_offset = TREE_INT_CST (offset);
8870 bool overflow;
8871 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8872 total = di_offset.add_with_sign (units, true, &overflow);
8873 if (overflow)
8874 return true;
8876 if (total.high != 0)
8877 return true;
8879 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8880 if (size <= 0)
8881 return true;
8883 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8884 array. */
8885 if (TREE_CODE (base) == ADDR_EXPR)
8887 HOST_WIDE_INT base_size;
8889 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8890 if (base_size > 0 && size < base_size)
8891 size = base_size;
8894 return total.low > (unsigned HOST_WIDE_INT) size;
8897 /* Subroutine of fold_binary. This routine performs all of the
8898 transformations that are common to the equality/inequality
8899 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8900 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8901 fold_binary should call fold_binary. Fold a comparison with
8902 tree code CODE and type TYPE with operands OP0 and OP1. Return
8903 the folded comparison or NULL_TREE. */
8905 static tree
8906 fold_comparison (location_t loc, enum tree_code code, tree type,
8907 tree op0, tree op1)
8909 tree arg0, arg1, tem;
8911 arg0 = op0;
8912 arg1 = op1;
8914 STRIP_SIGN_NOPS (arg0);
8915 STRIP_SIGN_NOPS (arg1);
8917 tem = fold_relational_const (code, type, arg0, arg1);
8918 if (tem != NULL_TREE)
8919 return tem;
8921 /* If one arg is a real or integer constant, put it last. */
8922 if (tree_swap_operands_p (arg0, arg1, true))
8923 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8925 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
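/* E.g. x + 5 < 20 becomes x < 15, which is valid when signed
overflow is undefined for X's type. */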
8926 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8927 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8928 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8930 && (TREE_CODE (arg1) == INTEGER_CST
8931 && !TREE_OVERFLOW (arg1)))
8933 tree const1 = TREE_OPERAND (arg0, 1);
8934 tree const2 = arg1;
8935 tree variable = TREE_OPERAND (arg0, 0);
8936 tree lhs;
8937 int lhs_add;
8938 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8940 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8941 TREE_TYPE (arg1), const2, const1);
8943 /* If the constant operation overflowed this can be
8944 simplified as a comparison against INT_MAX/INT_MIN. */
8945 if (TREE_CODE (lhs) == INTEGER_CST
8946 && TREE_OVERFLOW (lhs))
8948 int const1_sgn = tree_int_cst_sgn (const1);
8949 enum tree_code code2 = code;
8951 /* Get the sign of the constant on the lhs as if the
8952 operation were VARIABLE + CONST1. */
8953 if (TREE_CODE (arg0) == MINUS_EXPR)
8954 const1_sgn = -const1_sgn;
8956 /* The sign of the constant determines if we overflowed
8957 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8958 Canonicalize to the INT_MIN overflow by swapping the comparison
8959 if necessary. */
8960 if (const1_sgn == -1)
8961 code2 = swap_tree_comparison (code);
8963 /* We now can look at the canonicalized case
8964 VARIABLE + 1 CODE2 INT_MIN
8965 and decide on the result. */
8966 if (code2 == LT_EXPR
8967 || code2 == LE_EXPR
8968 || code2 == EQ_EXPR)
8969 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8970 else if (code2 == NE_EXPR
8971 || code2 == GE_EXPR
8972 || code2 == GT_EXPR)
8973 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8976 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8977 && (TREE_CODE (lhs) != INTEGER_CST
8978 || !TREE_OVERFLOW (lhs)))
8980 if (code != EQ_EXPR && code != NE_EXPR)
8981 fold_overflow_warning ("assuming signed overflow does not occur "
8982 "when changing X +- C1 cmp C2 to "
8983 "X cmp C1 +- C2",
8984 WARN_STRICT_OVERFLOW_COMPARISON);
8985 return fold_build2_loc (loc, code, type, variable, lhs);
8989 /* For comparisons of pointers we can decompose them to a compile-time
8990 comparison of the base objects and the offsets into the object.
8991 This requires at least one operand being an ADDR_EXPR or a
8992 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
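/* Added sketch of what this enables: with "int a[4];" the comparison
   &a[1] < &a[2] decomposes into the common base "a" and two constant
   bit positions (32 and 64 for 4-byte int), so it folds to true in
   the bitpos comparison below.  */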
8993 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8994 && (TREE_CODE (arg0) == ADDR_EXPR
8995 || TREE_CODE (arg1) == ADDR_EXPR
8996 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8997 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8999 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9000 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9001 enum machine_mode mode;
9002 int volatilep, unsignedp;
9003 bool indirect_base0 = false, indirect_base1 = false;
9005 /* Get base and offset for the access. Strip ADDR_EXPR for
9006 get_inner_reference, but put it back by stripping INDIRECT_REF
9007 off the base object if possible. indirect_baseN will be true
9008 if baseN is not an address but refers to the object itself. */
9009 base0 = arg0;
9010 if (TREE_CODE (arg0) == ADDR_EXPR)
9012 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9013 &bitsize, &bitpos0, &offset0, &mode,
9014 &unsignedp, &volatilep, false);
9015 if (TREE_CODE (base0) == INDIRECT_REF)
9016 base0 = TREE_OPERAND (base0, 0);
9017 else
9018 indirect_base0 = true;
9020 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9022 base0 = TREE_OPERAND (arg0, 0);
9023 STRIP_SIGN_NOPS (base0);
9024 if (TREE_CODE (base0) == ADDR_EXPR)
9026 base0 = TREE_OPERAND (base0, 0);
9027 indirect_base0 = true;
9029 offset0 = TREE_OPERAND (arg0, 1);
9030 if (host_integerp (offset0, 0))
9032 HOST_WIDE_INT off = size_low_cst (offset0);
9033 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9034 * BITS_PER_UNIT)
9035 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9037 bitpos0 = off * BITS_PER_UNIT;
9038 offset0 = NULL_TREE;
9043 base1 = arg1;
9044 if (TREE_CODE (arg1) == ADDR_EXPR)
9046 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9047 &bitsize, &bitpos1, &offset1, &mode,
9048 &unsignedp, &volatilep, false);
9049 if (TREE_CODE (base1) == INDIRECT_REF)
9050 base1 = TREE_OPERAND (base1, 0);
9051 else
9052 indirect_base1 = true;
9054 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9056 base1 = TREE_OPERAND (arg1, 0);
9057 STRIP_SIGN_NOPS (base1);
9058 if (TREE_CODE (base1) == ADDR_EXPR)
9060 base1 = TREE_OPERAND (base1, 0);
9061 indirect_base1 = true;
9063 offset1 = TREE_OPERAND (arg1, 1);
9064 if (host_integerp (offset1, 0))
9066 HOST_WIDE_INT off = size_low_cst (offset1);
9067 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9068 * BITS_PER_UNIT)
9069 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9071 bitpos1 = off * BITS_PER_UNIT;
9072 offset1 = NULL_TREE;
9077 /* A local variable can never be pointed to by
9078 the default SSA name of an incoming parameter. */
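/* Added hypothetical example: in
     int f (int *p) { int local; return p == &local; }
   p is the parameter's default definition on entry, so the equality
   folds to 0 (and p != &local would fold to 1).  */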
9079 if ((TREE_CODE (arg0) == ADDR_EXPR
9080 && indirect_base0
9081 && TREE_CODE (base0) == VAR_DECL
9082 && auto_var_in_fn_p (base0, current_function_decl)
9083 && !indirect_base1
9084 && TREE_CODE (base1) == SSA_NAME
9085 && SSA_NAME_IS_DEFAULT_DEF (base1)
9086 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9087 || (TREE_CODE (arg1) == ADDR_EXPR
9088 && indirect_base1
9089 && TREE_CODE (base1) == VAR_DECL
9090 && auto_var_in_fn_p (base1, current_function_decl)
9091 && !indirect_base0
9092 && TREE_CODE (base0) == SSA_NAME
9093 && SSA_NAME_IS_DEFAULT_DEF (base0)
9094 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9096 if (code == NE_EXPR)
9097 return constant_boolean_node (1, type);
9098 else if (code == EQ_EXPR)
9099 return constant_boolean_node (0, type);
9101 /* If we have equivalent bases we might be able to simplify. */
9102 else if (indirect_base0 == indirect_base1
9103 && operand_equal_p (base0, base1, 0))
9105 /* We can fold this expression to a constant if the non-constant
9106 offset parts are equal. */
9107 if ((offset0 == offset1
9108 || (offset0 && offset1
9109 && operand_equal_p (offset0, offset1, 0)))
9110 && (code == EQ_EXPR
9111 || code == NE_EXPR
9112 || (indirect_base0 && DECL_P (base0))
9113 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9116 if (code != EQ_EXPR
9117 && code != NE_EXPR
9118 && bitpos0 != bitpos1
9119 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9120 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9121 fold_overflow_warning (("assuming pointer wraparound does not "
9122 "occur when comparing P +- C1 with "
9123 "P +- C2"),
9124 WARN_STRICT_OVERFLOW_CONDITIONAL);
9126 switch (code)
9128 case EQ_EXPR:
9129 return constant_boolean_node (bitpos0 == bitpos1, type);
9130 case NE_EXPR:
9131 return constant_boolean_node (bitpos0 != bitpos1, type);
9132 case LT_EXPR:
9133 return constant_boolean_node (bitpos0 < bitpos1, type);
9134 case LE_EXPR:
9135 return constant_boolean_node (bitpos0 <= bitpos1, type);
9136 case GE_EXPR:
9137 return constant_boolean_node (bitpos0 >= bitpos1, type);
9138 case GT_EXPR:
9139 return constant_boolean_node (bitpos0 > bitpos1, type);
9140 default:;
9143 /* We can simplify the comparison to a comparison of the variable
9144 offset parts if the constant offset parts are equal.
9145 Be careful to use signed sizetype here because otherwise we
9146 mess with array offsets in the wrong way. This is possible
9147 because pointer arithmetic is restricted to remain within an
9148 object and overflow on pointer differences is undefined as of
9149 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
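/* Added illustrative case (assuming POINTER_TYPE_OVERFLOW_UNDEFINED):
   for "int *p", the test p + i < p + j has equal bases and equal
   constant parts, so it reduces below to a signed ssizetype
   comparison of the two variable byte offsets.  */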
9150 else if (bitpos0 == bitpos1
9151 && ((code == EQ_EXPR || code == NE_EXPR)
9152 || (indirect_base0 && DECL_P (base0))
9153 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9155 /* By converting to signed sizetype we cover middle-end pointer
9156 arithmetic which operates on unsigned pointer types of size
9157 type size and ARRAY_REF offsets which are properly sign or
9158 zero extended from their type in case it is narrower than
9159 sizetype. */
9160 if (offset0 == NULL_TREE)
9161 offset0 = build_int_cst (ssizetype, 0);
9162 else
9163 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9164 if (offset1 == NULL_TREE)
9165 offset1 = build_int_cst (ssizetype, 0);
9166 else
9167 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9169 if (code != EQ_EXPR
9170 && code != NE_EXPR
9171 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9172 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9173 fold_overflow_warning (("assuming pointer wraparound does not "
9174 "occur when comparing P +- C1 with "
9175 "P +- C2"),
9176 WARN_STRICT_OVERFLOW_COMPARISON);
9178 return fold_build2_loc (loc, code, type, offset0, offset1);
9181 /* For non-equal bases we can simplify if they are addresses
9182 of local binding decls or constants. */
9183 else if (indirect_base0 && indirect_base1
9184 /* We know that !operand_equal_p (base0, base1, 0)
9185 because the if condition was false. But make
9186 sure two decls are not the same. */
9187 && base0 != base1
9188 && TREE_CODE (arg0) == ADDR_EXPR
9189 && TREE_CODE (arg1) == ADDR_EXPR
9190 && (((TREE_CODE (base0) == VAR_DECL
9191 || TREE_CODE (base0) == PARM_DECL)
9192 && (targetm.binds_local_p (base0)
9193 || CONSTANT_CLASS_P (base1)))
9194 || CONSTANT_CLASS_P (base0))
9195 && (((TREE_CODE (base1) == VAR_DECL
9196 || TREE_CODE (base1) == PARM_DECL)
9197 && (targetm.binds_local_p (base1)
9198 || CONSTANT_CLASS_P (base0)))
9199 || CONSTANT_CLASS_P (base1)))
9201 if (code == EQ_EXPR)
9202 return omit_two_operands_loc (loc, type, boolean_false_node,
9203 arg0, arg1);
9204 else if (code == NE_EXPR)
9205 return omit_two_operands_loc (loc, type, boolean_true_node,
9206 arg0, arg1);
9208 /* For equal offsets we can simplify to a comparison of the
9209 base addresses. */
9210 else if (bitpos0 == bitpos1
9211 && (indirect_base0
9212 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9213 && (indirect_base1
9214 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9215 && ((offset0 == offset1)
9216 || (offset0 && offset1
9217 && operand_equal_p (offset0, offset1, 0))))
9219 if (indirect_base0)
9220 base0 = build_fold_addr_expr_loc (loc, base0);
9221 if (indirect_base1)
9222 base1 = build_fold_addr_expr_loc (loc, base1);
9223 return fold_build2_loc (loc, code, type, base0, base1);
9227 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9228 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9229 the resulting offset is smaller in absolute value than the
9230 original one. */
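/* Added example: for signed x and y,
     x + 2 < y + 5   becomes   x < y + 3
   since the combined constant 3 did not overflow and is smaller in
   absolute value than the original 5.  */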
9231 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9232 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9233 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9234 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9235 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9236 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9237 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9239 tree const1 = TREE_OPERAND (arg0, 1);
9240 tree const2 = TREE_OPERAND (arg1, 1);
9241 tree variable1 = TREE_OPERAND (arg0, 0);
9242 tree variable2 = TREE_OPERAND (arg1, 0);
9243 tree cst;
9244 const char * const warnmsg = G_("assuming signed overflow does not "
9245 "occur when combining constants around "
9246 "a comparison");
9248 /* Put the constant on the side where it doesn't overflow and is
9249 of lower absolute value than before. */
9250 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9251 ? MINUS_EXPR : PLUS_EXPR,
9252 const2, const1);
9253 if (!TREE_OVERFLOW (cst)
9254 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9256 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9257 return fold_build2_loc (loc, code, type,
9258 variable1,
9259 fold_build2_loc (loc,
9260 TREE_CODE (arg1), TREE_TYPE (arg1),
9261 variable2, cst));
9264 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9265 ? MINUS_EXPR : PLUS_EXPR,
9266 const1, const2);
9267 if (!TREE_OVERFLOW (cst)
9268 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9270 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9271 return fold_build2_loc (loc, code, type,
9272 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9273 variable1, cst),
9274 variable2);
9278 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9279 signed arithmetic case. That form is created by the compiler
9280 often enough for folding it to be of value. One example is in
9281 computing loop trip counts after Operator Strength Reduction. */
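/* Added sketch: for signed int x,
     x * 4 > 0    becomes   x > 0
     x * -4 > 0   becomes   x < 0   (comparison sense swapped below)
   valid only because signed multiplication overflow is undefined.  */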
9282 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9283 && TREE_CODE (arg0) == MULT_EXPR
9284 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9285 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9286 && integer_zerop (arg1))
9288 tree const1 = TREE_OPERAND (arg0, 1);
9289 tree const2 = arg1; /* zero */
9290 tree variable1 = TREE_OPERAND (arg0, 0);
9291 enum tree_code cmp_code = code;
9293 /* Handle unfolded multiplication by zero. */
9294 if (integer_zerop (const1))
9295 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9297 fold_overflow_warning (("assuming signed overflow does not occur when "
9298 "eliminating multiplication in comparison "
9299 "with zero"),
9300 WARN_STRICT_OVERFLOW_COMPARISON);
9302 /* If const1 is negative we swap the sense of the comparison. */
9303 if (tree_int_cst_sgn (const1) < 0)
9304 cmp_code = swap_tree_comparison (cmp_code);
9306 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9309 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9310 if (tem)
9311 return tem;
9313 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9315 tree targ0 = strip_float_extensions (arg0);
9316 tree targ1 = strip_float_extensions (arg1);
9317 tree newtype = TREE_TYPE (targ0);
9319 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9320 newtype = TREE_TYPE (targ1);
9322 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9323 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9324 return fold_build2_loc (loc, code, type,
9325 fold_convert_loc (loc, newtype, targ0),
9326 fold_convert_loc (loc, newtype, targ1));
9328 /* (-a) CMP (-b) -> b CMP a */
9329 if (TREE_CODE (arg0) == NEGATE_EXPR
9330 && TREE_CODE (arg1) == NEGATE_EXPR)
9331 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9332 TREE_OPERAND (arg0, 0));
9334 if (TREE_CODE (arg1) == REAL_CST)
9336 REAL_VALUE_TYPE cst;
9337 cst = TREE_REAL_CST (arg1);
9339 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9340 if (TREE_CODE (arg0) == NEGATE_EXPR)
9341 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9342 TREE_OPERAND (arg0, 0),
9343 build_real (TREE_TYPE (arg1),
9344 real_value_negate (&cst)));
9346 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9347 /* a CMP (-0) -> a CMP 0 */
9348 if (REAL_VALUE_MINUS_ZERO (cst))
9349 return fold_build2_loc (loc, code, type, arg0,
9350 build_real (TREE_TYPE (arg1), dconst0));
9352 /* x != NaN is always true, other ops are always false. */
9353 if (REAL_VALUE_ISNAN (cst)
9354 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9356 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9357 return omit_one_operand_loc (loc, type, tem, arg0);
9360 /* Fold comparisons against infinity. */
9361 if (REAL_VALUE_ISINF (cst)
9362 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9364 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9365 if (tem != NULL_TREE)
9366 return tem;
9370 /* If this is a comparison of a real constant with a PLUS_EXPR
9371 or a MINUS_EXPR of a real constant, we can convert it into a
9372 comparison with a revised real constant as long as no overflow
9373 occurs when unsafe_math_optimizations are enabled. */
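/* Added example (only under -funsafe-math-optimizations):
     x + 1.5 < 3.0   is rewritten to   x < 1.5
   provided the constant fold of 3.0 - 1.5 does not overflow.  */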
9374 if (flag_unsafe_math_optimizations
9375 && TREE_CODE (arg1) == REAL_CST
9376 && (TREE_CODE (arg0) == PLUS_EXPR
9377 || TREE_CODE (arg0) == MINUS_EXPR)
9378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9379 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9380 ? MINUS_EXPR : PLUS_EXPR,
9381 arg1, TREE_OPERAND (arg0, 1)))
9382 && !TREE_OVERFLOW (tem))
9383 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9385 /* Likewise, we can simplify a comparison of a real constant with
9386 a MINUS_EXPR whose first operand is also a real constant, i.e.
9387 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9388 floating-point types only if -fassociative-math is set. */
9389 if (flag_associative_math
9390 && TREE_CODE (arg1) == REAL_CST
9391 && TREE_CODE (arg0) == MINUS_EXPR
9392 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9393 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9394 arg1))
9395 && !TREE_OVERFLOW (tem))
9396 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9397 TREE_OPERAND (arg0, 1), tem);
9399 /* Fold comparisons against built-in math functions. */
9400 if (TREE_CODE (arg1) == REAL_CST
9401 && flag_unsafe_math_optimizations
9402 && ! flag_errno_math)
9404 enum built_in_function fcode = builtin_mathfn_code (arg0);
9406 if (fcode != END_BUILTINS)
9408 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9409 if (tem != NULL_TREE)
9410 return tem;
9415 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9416 && CONVERT_EXPR_P (arg0))
9418 /* If we are widening one operand of an integer comparison,
9419 see if the other operand is similarly being widened. Perhaps we
9420 can do the comparison in the narrower type. */
9421 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9422 if (tem)
9423 return tem;
9425 /* Or if we are changing signedness. */
9426 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9427 if (tem)
9428 return tem;
9431 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9432 constant, we can simplify it. */
9433 if (TREE_CODE (arg1) == INTEGER_CST
9434 && (TREE_CODE (arg0) == MIN_EXPR
9435 || TREE_CODE (arg0) == MAX_EXPR)
9436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9438 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9439 if (tem)
9440 return tem;
9443 /* Simplify comparison of something with itself. (For IEEE
9444 floating-point, we can only do some of these simplifications.) */
9445 if (operand_equal_p (arg0, arg1, 0))
9447 switch (code)
9449 case EQ_EXPR:
9450 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9451 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9452 return constant_boolean_node (1, type);
9453 break;
9455 case GE_EXPR:
9456 case LE_EXPR:
9457 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9458 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9459 return constant_boolean_node (1, type);
9460 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9462 case NE_EXPR:
9463 /* For NE, we can only do this simplification if integer
9464 or we don't honor IEEE floating point NaNs. */
9465 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9466 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9467 break;
9468 /* ... fall through ... */
9469 case GT_EXPR:
9470 case LT_EXPR:
9471 return constant_boolean_node (0, type);
9472 default:
9473 gcc_unreachable ();
9477 /* If we are comparing an expression that just has comparisons
9478 of two integer values, arithmetic expressions of those comparisons,
9479 and constants, we can simplify it. There are only three cases
9480 to check: the two values can either be equal, the first can be
9481 greater, or the second can be greater. Fold the expression for
9482 those three values. Since each value must be 0 or 1, we have
9483 eight possibilities, each of which corresponds to the constant 0
9484 or 1 or one of the six possible comparisons.
9486 This handles common cases like (a > b) == 0 but also handles
9487 expressions like ((x > y) - (y > x)) > 0, which supposedly
9488 occur in macroized code. */
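/* Added worked example: for (a > b) == 0 the three substitutions
   below evaluate to 0, 1 and 1 for the cases a > b, a == b and
   a < b, i.e. the 3-bit mask 011, so the expression folds to
   a <= b.  */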
9490 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9492 tree cval1 = 0, cval2 = 0;
9493 int save_p = 0;
9495 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9496 /* Don't handle degenerate cases here; they should already
9497 have been handled anyway. */
9498 && cval1 != 0 && cval2 != 0
9499 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9500 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9501 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9502 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9503 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9504 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9505 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9507 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9508 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9510 /* We can't just pass T to eval_subst in case cval1 or cval2
9511 was the same as ARG1. */
9513 tree high_result
9514 = fold_build2_loc (loc, code, type,
9515 eval_subst (loc, arg0, cval1, maxval,
9516 cval2, minval),
9517 arg1);
9518 tree equal_result
9519 = fold_build2_loc (loc, code, type,
9520 eval_subst (loc, arg0, cval1, maxval,
9521 cval2, maxval),
9522 arg1);
9523 tree low_result
9524 = fold_build2_loc (loc, code, type,
9525 eval_subst (loc, arg0, cval1, minval,
9526 cval2, maxval),
9527 arg1);
9529 /* All three of these results should be 0 or 1. Confirm they are.
9530 Then use those values to select the proper code to use. */
9532 if (TREE_CODE (high_result) == INTEGER_CST
9533 && TREE_CODE (equal_result) == INTEGER_CST
9534 && TREE_CODE (low_result) == INTEGER_CST)
9536 /* Make a 3-bit mask with the high-order bit being the
9537 value for `>', the next for `=', and the low for `<'. */
9538 switch ((integer_onep (high_result) * 4)
9539 + (integer_onep (equal_result) * 2)
9540 + integer_onep (low_result))
9542 case 0:
9543 /* Always false. */
9544 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9545 case 1:
9546 code = LT_EXPR;
9547 break;
9548 case 2:
9549 code = EQ_EXPR;
9550 break;
9551 case 3:
9552 code = LE_EXPR;
9553 break;
9554 case 4:
9555 code = GT_EXPR;
9556 break;
9557 case 5:
9558 code = NE_EXPR;
9559 break;
9560 case 6:
9561 code = GE_EXPR;
9562 break;
9563 case 7:
9564 /* Always true. */
9565 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9568 if (save_p)
9570 tem = save_expr (build2 (code, type, cval1, cval2));
9571 SET_EXPR_LOCATION (tem, loc);
9572 return tem;
9574 return fold_build2_loc (loc, code, type, cval1, cval2);
9579 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9580 into a single range test. */
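/* Added example: for unsigned x, x / 4 == 2 holds exactly for
   x in [8, 11], so fold_div_compare can replace the division by a
   single range test such as x - 8 <= 3.  */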
9581 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9582 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9583 && TREE_CODE (arg1) == INTEGER_CST
9584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9585 && !integer_zerop (TREE_OPERAND (arg0, 1))
9586 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9587 && !TREE_OVERFLOW (arg1))
9589 tem = fold_div_compare (loc, code, type, arg0, arg1);
9590 if (tem != NULL_TREE)
9591 return tem;
9594 /* Fold ~X op ~Y as Y op X. */
9595 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9596 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9598 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9599 return fold_build2_loc (loc, code, type,
9600 fold_convert_loc (loc, cmp_type,
9601 TREE_OPERAND (arg1, 0)),
9602 TREE_OPERAND (arg0, 0));
9605 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9606 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9607 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9609 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9610 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9611 TREE_OPERAND (arg0, 0),
9612 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9613 fold_convert_loc (loc, cmp_type, arg1)));
9616 return NULL_TREE;
9620 /* Subroutine of fold_binary. Optimize complex multiplications of the
9621 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9622 argument EXPR represents the expression "z" of type TYPE. */
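/* Added note on the identity being used: for z = a + b*i,
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i
   so the imaginary part of the result is always zero.  */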
9624 static tree
9625 fold_mult_zconjz (location_t loc, tree type, tree expr)
9627 tree itype = TREE_TYPE (type);
9628 tree rpart, ipart, tem;
9630 if (TREE_CODE (expr) == COMPLEX_EXPR)
9632 rpart = TREE_OPERAND (expr, 0);
9633 ipart = TREE_OPERAND (expr, 1);
9635 else if (TREE_CODE (expr) == COMPLEX_CST)
9637 rpart = TREE_REALPART (expr);
9638 ipart = TREE_IMAGPART (expr);
9640 else
9642 expr = save_expr (expr);
9643 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9644 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9647 rpart = save_expr (rpart);
9648 ipart = save_expr (ipart);
9649 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9650 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9651 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9652 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9653 build_zero_cst (itype));
9657 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9658 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9659 guarantees that P and N have the same least significant log2(M) bits.
9660 N is not otherwise constrained. In particular, N is not normalized to
9661 0 <= N < M as is common. In general, the precise value of P is unknown.
9662 M is chosen as large as possible such that constant N can be determined.
9664 Returns M and sets *RESIDUE to N.
9666 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9667 account. This is not always possible due to PR 35705.
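/* Added illustrative reading of the M/N encoding: if EXPR is
   &v + 36 for an 8-byte-aligned v, the function returns M = 8 with
   *RESIDUE = 36, i.e. the pointer value is congruent to 4 mod 8.  */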
9670 static unsigned HOST_WIDE_INT
9671 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9672 bool allow_func_align)
9674 enum tree_code code;
9676 *residue = 0;
9678 code = TREE_CODE (expr);
9679 if (code == ADDR_EXPR)
9681 unsigned int bitalign;
9682 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9683 *residue /= BITS_PER_UNIT;
9684 return bitalign / BITS_PER_UNIT;
9686 else if (code == POINTER_PLUS_EXPR)
9688 tree op0, op1;
9689 unsigned HOST_WIDE_INT modulus;
9690 enum tree_code inner_code;
9692 op0 = TREE_OPERAND (expr, 0);
9693 STRIP_NOPS (op0);
9694 modulus = get_pointer_modulus_and_residue (op0, residue,
9695 allow_func_align);
9697 op1 = TREE_OPERAND (expr, 1);
9698 STRIP_NOPS (op1);
9699 inner_code = TREE_CODE (op1);
9700 if (inner_code == INTEGER_CST)
9702 *residue += TREE_INT_CST_LOW (op1);
9703 return modulus;
9705 else if (inner_code == MULT_EXPR)
9707 op1 = TREE_OPERAND (op1, 1);
9708 if (TREE_CODE (op1) == INTEGER_CST)
9710 unsigned HOST_WIDE_INT align;
9712 /* Compute the greatest power-of-2 divisor of op1. */
9713 align = TREE_INT_CST_LOW (op1);
9714 align &= -align;
9716 /* If align is non-zero and less than modulus, replace
9717 modulus with align. If align is 0, then either op1 is 0
9718 or the greatest power-of-2 divisor of op1 doesn't fit in an
9719 unsigned HOST_WIDE_INT. In either case, no additional
9720 constraint is imposed. */
9721 if (align)
9722 modulus = MIN (modulus, align);
9724 return modulus;
9729 /* If we get here, we were unable to determine anything useful about the
9730 expression. */
9731 return 1;
9734 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9735 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9737 static bool
9738 vec_cst_ctor_to_array (tree arg, tree *elts)
9740 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9742 if (TREE_CODE (arg) == VECTOR_CST)
9744 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9745 elts[i] = VECTOR_CST_ELT (arg, i);
9747 else if (TREE_CODE (arg) == CONSTRUCTOR)
9749 constructor_elt *elt;
9751 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9752 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9753 return false;
9754 else
9755 elts[i] = elt->value;
9757 else
9758 return false;
9759 for (; i < nelts; i++)
9760 elts[i]
9761 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9762 return true;
9765 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9766 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9767 NULL_TREE otherwise. */
9769 static tree
9770 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9772 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9773 tree *elts;
9774 bool need_ctor = false;
9776 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9777 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9778 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9779 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9780 return NULL_TREE;
9782 elts = XALLOCAVEC (tree, nelts * 3);
9783 if (!vec_cst_ctor_to_array (arg0, elts)
9784 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9785 return NULL_TREE;
9787 for (i = 0; i < nelts; i++)
9789 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9790 need_ctor = true;
9791 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9794 if (need_ctor)
9796 vec<constructor_elt, va_gc> *v;
9797 vec_alloc (v, nelts);
9798 for (i = 0; i < nelts; i++)
9799 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9800 return build_constructor (type, v);
9802 else
9803 return build_vector (type, &elts[2 * nelts]);
9806 /* Try to fold a pointer difference of type TYPE between two address expressions of
9807 array references AREF0 and AREF1 using location LOC. Return a
9808 simplified expression for the difference or NULL_TREE. */
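/* Added example: with "int a[10];" the byte difference
   &a[i] - &a[j] folds here to (i - j) * 4 (4 == sizeof (int)),
   avoiding the pointer subtraction entirely.  */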
9810 static tree
9811 fold_addr_of_array_ref_difference (location_t loc, tree type,
9812 tree aref0, tree aref1)
9814 tree base0 = TREE_OPERAND (aref0, 0);
9815 tree base1 = TREE_OPERAND (aref1, 0);
9816 tree base_offset = build_int_cst (type, 0);
9818 /* If the bases are array references as well, recurse. If the bases
9819 are pointer indirections compute the difference of the pointers.
9820 If the bases are equal, we are set. */
9821 if ((TREE_CODE (base0) == ARRAY_REF
9822 && TREE_CODE (base1) == ARRAY_REF
9823 && (base_offset
9824 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9825 || (INDIRECT_REF_P (base0)
9826 && INDIRECT_REF_P (base1)
9827 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9828 TREE_OPERAND (base0, 0),
9829 TREE_OPERAND (base1, 0))))
9830 || operand_equal_p (base0, base1, 0))
9832 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9833 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9834 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9835 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9836 return fold_build2_loc (loc, PLUS_EXPR, type,
9837 base_offset,
9838 fold_build2_loc (loc, MULT_EXPR, type,
9839 diff, esz));
9841 return NULL_TREE;
9844 /* If the real or vector real constant CST of type TYPE has an exact
9845 inverse, return it, else return NULL. */
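/* Added examples: 4.0 has the exact binary inverse 0.25, so
   x / 4.0 may become x * 0.25; 3.0 has no exactly representable
   inverse and NULL_TREE is returned for it.  */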
9847 static tree
9848 exact_inverse (tree type, tree cst)
9850 REAL_VALUE_TYPE r;
9851 tree unit_type, *elts;
9852 enum machine_mode mode;
9853 unsigned vec_nelts, i;
9855 switch (TREE_CODE (cst))
9857 case REAL_CST:
9858 r = TREE_REAL_CST (cst);
9860 if (exact_real_inverse (TYPE_MODE (type), &r))
9861 return build_real (type, r);
9863 return NULL_TREE;
9865 case VECTOR_CST:
9866 vec_nelts = VECTOR_CST_NELTS (cst);
9867 elts = XALLOCAVEC (tree, vec_nelts);
9868 unit_type = TREE_TYPE (type);
9869 mode = TYPE_MODE (unit_type);
9871 for (i = 0; i < vec_nelts; i++)
9873 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9874 if (!exact_real_inverse (mode, &r))
9875 return NULL_TREE;
9876 elts[i] = build_real (unit_type, r);
9879 return build_vector (type, elts);
9881 default:
9882 return NULL_TREE;
9886 /* Fold a binary expression of code CODE and type TYPE with operands
9887 OP0 and OP1. LOC is the location of the resulting expression.
9888 Return the folded expression if folding is successful. Otherwise,
9889 return NULL_TREE. */
9891 tree
9892 fold_binary_loc (location_t loc,
9893 enum tree_code code, tree type, tree op0, tree op1)
9895 enum tree_code_class kind = TREE_CODE_CLASS (code);
9896 tree arg0, arg1, tem;
9897 tree t1 = NULL_TREE;
9898 bool strict_overflow_p;
9899 unsigned int prec;
9901 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9902 && TREE_CODE_LENGTH (code) == 2
9903 && op0 != NULL_TREE
9904 && op1 != NULL_TREE);
9906 arg0 = op0;
9907 arg1 = op1;
9909 /* Strip any conversions that don't change the mode. This is
9910 safe for every expression, except for a comparison expression
9911 because its signedness is derived from its operands. So, in
9912 the latter case, only strip conversions that don't change the
9913 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9914 preserved.
9916 Note that this is done as an internal manipulation within the
9917 constant folder, in order to find the simplest representation
9918 of the arguments so that their form can be studied. In any
9919 case, the appropriate type conversions should be put back in
9920 the tree that will get out of the constant folder. */
9922 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9924 STRIP_SIGN_NOPS (arg0);
9925 STRIP_SIGN_NOPS (arg1);
9927 else
9929 STRIP_NOPS (arg0);
9930 STRIP_NOPS (arg1);
9933 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9934 constant but we can't do arithmetic on them. */
9935 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9936 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9937 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9938 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9939 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9940 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9941 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9943 if (kind == tcc_binary)
9945 /* Make sure type and arg0 have the same saturating flag. */
9946 gcc_assert (TYPE_SATURATING (type)
9947 == TYPE_SATURATING (TREE_TYPE (arg0)));
9948 tem = const_binop (code, arg0, arg1);
9950 else if (kind == tcc_comparison)
9951 tem = fold_relational_const (code, type, arg0, arg1);
9952 else
9953 tem = NULL_TREE;
9955 if (tem != NULL_TREE)
9957 if (TREE_TYPE (tem) != type)
9958 tem = fold_convert_loc (loc, type, tem);
9959 return tem;
9963 /* If this is a commutative operation, and ARG0 is a constant, move it
9964 to ARG1 to reduce the number of tests below. */
9965 if (commutative_tree_code (code)
9966 && tree_swap_operands_p (arg0, arg1, true))
9967 return fold_build2_loc (loc, code, type, op1, op0);
9969 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9971 First check for cases where an arithmetic operation is applied to a
9972 compound, conditional, or comparison operation. Push the arithmetic
9973 operation inside the compound or conditional to see if any folding
9974 can then be done. Convert comparison to conditional for this purpose.
9975 This also optimizes non-constant cases that used to be done in
9976 expand_expr.
9978 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9979 one of the operands is a comparison and the other is a comparison, a
9980 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9981 code below would make the expression more complex. Change it to a
9982 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9983 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9985 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9986 || code == EQ_EXPR || code == NE_EXPR)
9987 && TREE_CODE (type) != VECTOR_TYPE
9988 && ((truth_value_p (TREE_CODE (arg0))
9989 && (truth_value_p (TREE_CODE (arg1))
9990 || (TREE_CODE (arg1) == BIT_AND_EXPR
9991 && integer_onep (TREE_OPERAND (arg1, 1)))))
9992 || (truth_value_p (TREE_CODE (arg1))
9993 && (truth_value_p (TREE_CODE (arg0))
9994 || (TREE_CODE (arg0) == BIT_AND_EXPR
9995 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9997 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9998 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9999 : TRUTH_XOR_EXPR,
10000 boolean_type_node,
10001 fold_convert_loc (loc, boolean_type_node, arg0),
10002 fold_convert_loc (loc, boolean_type_node, arg1));
10004 if (code == EQ_EXPR)
10005 tem = invert_truthvalue_loc (loc, tem);
10007 return fold_convert_loc (loc, type, tem);
10010 if (TREE_CODE_CLASS (code) == tcc_binary
10011 || TREE_CODE_CLASS (code) == tcc_comparison)
10013 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10015 tem = fold_build2_loc (loc, code, type,
10016 fold_convert_loc (loc, TREE_TYPE (op0),
10017 TREE_OPERAND (arg0, 1)), op1);
10018 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10019 tem);
10021 if (TREE_CODE (arg1) == COMPOUND_EXPR
10022 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10024 tem = fold_build2_loc (loc, code, type, op0,
10025 fold_convert_loc (loc, TREE_TYPE (op1),
10026 TREE_OPERAND (arg1, 1)));
10027 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10028 tem);
10031 if (TREE_CODE (arg0) == COND_EXPR
10032 || TREE_CODE (arg0) == VEC_COND_EXPR
10033 || COMPARISON_CLASS_P (arg0))
10035 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10036 arg0, arg1,
10037 /*cond_first_p=*/1);
10038 if (tem != NULL_TREE)
10039 return tem;
10042 if (TREE_CODE (arg1) == COND_EXPR
10043 || TREE_CODE (arg1) == VEC_COND_EXPR
10044 || COMPARISON_CLASS_P (arg1))
10046 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10047 arg1, arg0,
10048 /*cond_first_p=*/0);
10049 if (tem != NULL_TREE)
10050 return tem;
10054 switch (code)
10056 case MEM_REF:
10057 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10058 if (TREE_CODE (arg0) == ADDR_EXPR
10059 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10061 tree iref = TREE_OPERAND (arg0, 0);
10062 return fold_build2 (MEM_REF, type,
10063 TREE_OPERAND (iref, 0),
10064 int_const_binop (PLUS_EXPR, arg1,
10065 TREE_OPERAND (iref, 1)));
10068 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10069 if (TREE_CODE (arg0) == ADDR_EXPR
10070 && handled_component_p (TREE_OPERAND (arg0, 0)))
10072 tree base;
10073 HOST_WIDE_INT coffset;
10074 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10075 &coffset);
10076 if (!base)
10077 return NULL_TREE;
10078 return fold_build2 (MEM_REF, type,
10079 build_fold_addr_expr (base),
10080 int_const_binop (PLUS_EXPR, arg1,
10081 size_int (coffset)));
10084 return NULL_TREE;
10086 case POINTER_PLUS_EXPR:
10087 /* 0 +p index -> (type)index */
10088 if (integer_zerop (arg0))
10089 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10091 /* PTR +p 0 -> PTR */
10092 if (integer_zerop (arg1))
10093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10095 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10096 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10097 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10098 return fold_convert_loc (loc, type,
10099 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10100 fold_convert_loc (loc, sizetype,
10101 arg1),
10102 fold_convert_loc (loc, sizetype,
10103 arg0)));
10105 /* (PTR +p B) +p A -> PTR +p (B + A) */
10106 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10108 tree inner;
10109 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10110 tree arg00 = TREE_OPERAND (arg0, 0);
10111 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10112 arg01, fold_convert_loc (loc, sizetype, arg1));
10113 return fold_convert_loc (loc, type,
10114 fold_build_pointer_plus_loc (loc,
10115 arg00, inner));
10118 /* PTR_CST +p CST -> CST1 */
10119 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10120 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10121 fold_convert_loc (loc, type, arg1));
10123 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10124 of the array. The loop optimizer sometimes produces this type of
10125 expression. */
10126 if (TREE_CODE (arg0) == ADDR_EXPR)
10128 tem = try_move_mult_to_index (loc, arg0,
10129 fold_convert_loc (loc,
10130 ssizetype, arg1));
10131 if (tem)
10132 return fold_convert_loc (loc, type, tem);
10135 return NULL_TREE;
10137 case PLUS_EXPR:
10138 /* A + (-B) -> A - B */
10139 if (TREE_CODE (arg1) == NEGATE_EXPR)
10140 return fold_build2_loc (loc, MINUS_EXPR, type,
10141 fold_convert_loc (loc, type, arg0),
10142 fold_convert_loc (loc, type,
10143 TREE_OPERAND (arg1, 0)));
10144 /* (-A) + B -> B - A */
10145 if (TREE_CODE (arg0) == NEGATE_EXPR
10146 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10147 return fold_build2_loc (loc, MINUS_EXPR, type,
10148 fold_convert_loc (loc, type, arg1),
10149 fold_convert_loc (loc, type,
10150 TREE_OPERAND (arg0, 0)));
10152 if (INTEGRAL_TYPE_P (type))
10154 /* Convert ~A + 1 to -A. */
10155 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10156 && integer_onep (arg1))
10157 return fold_build1_loc (loc, NEGATE_EXPR, type,
10158 fold_convert_loc (loc, type,
10159 TREE_OPERAND (arg0, 0)));
10161 /* ~X + X is -1. */
10162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10163 && !TYPE_OVERFLOW_TRAPS (type))
10165 tree tem = TREE_OPERAND (arg0, 0);
10167 STRIP_NOPS (tem);
10168 if (operand_equal_p (tem, arg1, 0))
10170 t1 = build_minus_one_cst (type);
10171 return omit_one_operand_loc (loc, type, t1, arg1);
10175 /* X + ~X is -1. */
10176 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10177 && !TYPE_OVERFLOW_TRAPS (type))
10179 tree tem = TREE_OPERAND (arg1, 0);
10181 STRIP_NOPS (tem);
10182 if (operand_equal_p (arg0, tem, 0))
10184 t1 = build_minus_one_cst (type);
10185 return omit_one_operand_loc (loc, type, t1, arg0);
10189 /* X + (X / CST) * -CST is X % CST. */
10190 if (TREE_CODE (arg1) == MULT_EXPR
10191 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10192 && operand_equal_p (arg0,
10193 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10195 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10196 tree cst1 = TREE_OPERAND (arg1, 1);
10197 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10198 cst1, cst0);
10199 if (sum && integer_zerop (sum))
10200 return fold_convert_loc (loc, type,
10201 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10202 TREE_TYPE (arg0), arg0,
10203 cst0));
10207 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10208 one. Make sure the type is not saturating and has the signedness of
10209 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10210 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10211 if ((TREE_CODE (arg0) == MULT_EXPR
10212 || TREE_CODE (arg1) == MULT_EXPR)
10213 && !TYPE_SATURATING (type)
10214 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10215 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10216 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10218 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10219 if (tem)
10220 return tem;
10223 if (! FLOAT_TYPE_P (type))
10225 if (integer_zerop (arg1))
10226 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10228 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10229 with a constant, and the two constants have no bits in common,
10230 we should treat this as a BIT_IOR_EXPR since this may produce more
10231 simplifications. */
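/* Added example: the masks below are disjoint in
     (x & 0xf0) + (y & 0x0f)
   so no bit position can produce a carry and the addition behaves
   exactly like (x & 0xf0) | (y & 0x0f).  */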
10232 if (TREE_CODE (arg0) == BIT_AND_EXPR
10233 && TREE_CODE (arg1) == BIT_AND_EXPR
10234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10235 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10236 && integer_zerop (const_binop (BIT_AND_EXPR,
10237 TREE_OPERAND (arg0, 1),
10238 TREE_OPERAND (arg1, 1))))
10240 code = BIT_IOR_EXPR;
10241 goto bit_ior;
10244 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10245 (plus (plus (mult) (mult)) (foo)) so that we can
10246 take advantage of the factoring cases below. */
10247 if (TYPE_OVERFLOW_WRAPS (type)
10248 && (((TREE_CODE (arg0) == PLUS_EXPR
10249 || TREE_CODE (arg0) == MINUS_EXPR)
10250 && TREE_CODE (arg1) == MULT_EXPR)
10251 || ((TREE_CODE (arg1) == PLUS_EXPR
10252 || TREE_CODE (arg1) == MINUS_EXPR)
10253 && TREE_CODE (arg0) == MULT_EXPR)))
10255 tree parg0, parg1, parg, marg;
10256 enum tree_code pcode;
10258 if (TREE_CODE (arg1) == MULT_EXPR)
10259 parg = arg0, marg = arg1;
10260 else
10261 parg = arg1, marg = arg0;
10262 pcode = TREE_CODE (parg);
10263 parg0 = TREE_OPERAND (parg, 0);
10264 parg1 = TREE_OPERAND (parg, 1);
10265 STRIP_NOPS (parg0);
10266 STRIP_NOPS (parg1);
10268 if (TREE_CODE (parg0) == MULT_EXPR
10269 && TREE_CODE (parg1) != MULT_EXPR)
10270 return fold_build2_loc (loc, pcode, type,
10271 fold_build2_loc (loc, PLUS_EXPR, type,
10272 fold_convert_loc (loc, type,
10273 parg0),
10274 fold_convert_loc (loc, type,
10275 marg)),
10276 fold_convert_loc (loc, type, parg1));
10277 if (TREE_CODE (parg0) != MULT_EXPR
10278 && TREE_CODE (parg1) == MULT_EXPR)
10279 return
10280 fold_build2_loc (loc, PLUS_EXPR, type,
10281 fold_convert_loc (loc, type, parg0),
10282 fold_build2_loc (loc, pcode, type,
10283 fold_convert_loc (loc, type, marg),
10284 fold_convert_loc (loc, type,
10285 parg1)));
10288 else
10290 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10291 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10292 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10294 /* Likewise if the operands are reversed. */
10295 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10296 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10298 /* Convert X + -C into X - C. */
10299 if (TREE_CODE (arg1) == REAL_CST
10300 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10302 tem = fold_negate_const (arg1, type);
10303 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10304 return fold_build2_loc (loc, MINUS_EXPR, type,
10305 fold_convert_loc (loc, type, arg0),
10306 fold_convert_loc (loc, type, tem));
10309 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10310 to __complex__ ( x, y ). This is not the same for SNaNs or
10311 if signed zeros are involved. */
10312 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10313 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10314 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10316 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10317 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10318 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10319 bool arg0rz = false, arg0iz = false;
10320 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10321 || (arg0i && (arg0iz = real_zerop (arg0i))))
10323 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10324 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10325 if (arg0rz && arg1i && real_zerop (arg1i))
10327 tree rp = arg1r ? arg1r
10328 : build1 (REALPART_EXPR, rtype, arg1);
10329 tree ip = arg0i ? arg0i
10330 : build1 (IMAGPART_EXPR, rtype, arg0);
10331 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10333 else if (arg0iz && arg1r && real_zerop (arg1r))
10335 tree rp = arg0r ? arg0r
10336 : build1 (REALPART_EXPR, rtype, arg0);
10337 tree ip = arg1i ? arg1i
10338 : build1 (IMAGPART_EXPR, rtype, arg1);
10339 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10344 if (flag_unsafe_math_optimizations
10345 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10346 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10347 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10348 return tem;
10350 /* Convert x+x into x*2.0. */
10351 if (operand_equal_p (arg0, arg1, 0)
10352 && SCALAR_FLOAT_TYPE_P (type))
10353 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10354 build_real (type, dconst2));
10356 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10357 We associate floats only if the user has specified
10358 -fassociative-math. */
10359 if (flag_associative_math
10360 && TREE_CODE (arg1) == PLUS_EXPR
10361 && TREE_CODE (arg0) != MULT_EXPR)
10363 tree tree10 = TREE_OPERAND (arg1, 0);
10364 tree tree11 = TREE_OPERAND (arg1, 1);
10365 if (TREE_CODE (tree11) == MULT_EXPR
10366 && TREE_CODE (tree10) == MULT_EXPR)
10368 tree tree0;
10369 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10370 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10373 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10374 We associate floats only if the user has specified
10375 -fassociative-math. */
10376 if (flag_associative_math
10377 && TREE_CODE (arg0) == PLUS_EXPR
10378 && TREE_CODE (arg1) != MULT_EXPR)
10380 tree tree00 = TREE_OPERAND (arg0, 0);
10381 tree tree01 = TREE_OPERAND (arg0, 1);
10382 if (TREE_CODE (tree01) == MULT_EXPR
10383 && TREE_CODE (tree00) == MULT_EXPR)
10385 tree tree0;
10386 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10387 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10392 bit_rotate:
10393 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10394 is a rotate of A by C1 bits. */
10395 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10396 is a rotate of A by B bits. */
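/* Added examples, for a 32-bit unsigned int x:
     (x << 3) + (x >> 29)        is a rotate left of x by 3, and
     (x << n) + (x >> (32 - n))  a rotate left by n.  */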
10398 enum tree_code code0, code1;
10399 tree rtype;
10400 code0 = TREE_CODE (arg0);
10401 code1 = TREE_CODE (arg1);
10402 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10403 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10404 && operand_equal_p (TREE_OPERAND (arg0, 0),
10405 TREE_OPERAND (arg1, 0), 0)
10406 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10407 TYPE_UNSIGNED (rtype))
10408 /* Only create rotates in complete modes. Other cases are not
10409 expanded properly. */
10410 && (element_precision (rtype)
10411 == element_precision (TYPE_MODE (rtype))))
10413 tree tree01, tree11;
10414 enum tree_code code01, code11;
10416 tree01 = TREE_OPERAND (arg0, 1);
10417 tree11 = TREE_OPERAND (arg1, 1);
10418 STRIP_NOPS (tree01);
10419 STRIP_NOPS (tree11);
10420 code01 = TREE_CODE (tree01);
10421 code11 = TREE_CODE (tree11);
10422 if (code01 == INTEGER_CST
10423 && code11 == INTEGER_CST
10424 && TREE_INT_CST_HIGH (tree01) == 0
10425 && TREE_INT_CST_HIGH (tree11) == 0
10426 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10427 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10429 tem = build2_loc (loc, LROTATE_EXPR,
10430 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10431 TREE_OPERAND (arg0, 0),
10432 code0 == LSHIFT_EXPR ? tree01 : tree11);
10433 return fold_convert_loc (loc, type, tem);
10435 else if (code11 == MINUS_EXPR)
10437 tree tree110, tree111;
10438 tree110 = TREE_OPERAND (tree11, 0);
10439 tree111 = TREE_OPERAND (tree11, 1);
10440 STRIP_NOPS (tree110);
10441 STRIP_NOPS (tree111);
10442 if (TREE_CODE (tree110) == INTEGER_CST
10443 && 0 == compare_tree_int (tree110,
10444 element_precision
10445 (TREE_TYPE (TREE_OPERAND
10446 (arg0, 0))))
10447 && operand_equal_p (tree01, tree111, 0))
10448 return
10449 fold_convert_loc (loc, type,
10450 build2 ((code0 == LSHIFT_EXPR
10451 ? LROTATE_EXPR
10452 : RROTATE_EXPR),
10453 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10454 TREE_OPERAND (arg0, 0), tree01));
10456 else if (code01 == MINUS_EXPR)
10458 tree tree010, tree011;
10459 tree010 = TREE_OPERAND (tree01, 0);
10460 tree011 = TREE_OPERAND (tree01, 1);
10461 STRIP_NOPS (tree010);
10462 STRIP_NOPS (tree011);
10463 if (TREE_CODE (tree010) == INTEGER_CST
10464 && 0 == compare_tree_int (tree010,
10465 element_precision
10466 (TREE_TYPE (TREE_OPERAND
10467 (arg0, 0))))
10468 && operand_equal_p (tree11, tree011, 0))
10469 return fold_convert_loc
10470 (loc, type,
10471 build2 ((code0 != LSHIFT_EXPR
10472 ? LROTATE_EXPR
10473 : RROTATE_EXPR),
10474 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10475 TREE_OPERAND (arg0, 0), tree11));
10480 associate:
10481 /* In most languages, we can't associate operations on floats through
10482 parentheses. Rather than remember where the parentheses were, we
10483 don't associate floats at all, unless the user has specified
10484 -fassociative-math.
10485 And we need to make sure the type is not saturating. */
10487 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10488 && !TYPE_SATURATING (type))
10490 tree var0, con0, lit0, minus_lit0;
10491 tree var1, con1, lit1, minus_lit1;
10492 tree atype = type;
10493 bool ok = true;
10495 /* Split both trees into variables, constants, and literals. Then
10496 associate each group together, the constants with literals,
10497 then the result with variables. This increases the chances of
10498 literals being recombined later and of generating relocatable
10499 expressions for the sum of a constant and literal. */
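/* Added example: for unsigned x and y, (x + 1) + (y + 2) splits
   into the variables x, y and the literals 1, 2, which the calls
   below recombine into (x + y) + 3.  */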
10500 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10501 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10502 code == MINUS_EXPR);
10504 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10505 if (code == MINUS_EXPR)
10506 code = PLUS_EXPR;
10508 /* With undefined overflow prefer doing association in a type
10509 which wraps on overflow, if that is one of the operand types. */
10510 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10511 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10513 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10514 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10515 atype = TREE_TYPE (arg0);
10516 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10517 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10518 atype = TREE_TYPE (arg1);
10519 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10522 /* With undefined overflow we can only associate constants with one
10523 variable, and constants whose association doesn't overflow. */
10524 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10525 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10527 if (var0 && var1)
10529 tree tmp0 = var0;
10530 tree tmp1 = var1;
10532 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10533 tmp0 = TREE_OPERAND (tmp0, 0);
10534 if (CONVERT_EXPR_P (tmp0)
10535 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10536 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10537 <= TYPE_PRECISION (atype)))
10538 tmp0 = TREE_OPERAND (tmp0, 0);
10539 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10540 tmp1 = TREE_OPERAND (tmp1, 0);
10541 if (CONVERT_EXPR_P (tmp1)
10542 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10543 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10544 <= TYPE_PRECISION (atype)))
10545 tmp1 = TREE_OPERAND (tmp1, 0);
10546 /* The only case we can still associate with two variables
10547 is if they are the same, modulo negation and bit-pattern
10548 preserving conversions. */
10549 if (!operand_equal_p (tmp0, tmp1, 0))
10550 ok = false;
10554 /* Only do something if we found more than two objects. Otherwise,
10555 nothing has changed and we risk infinite recursion. */
10556 if (ok
10557 && (2 < ((var0 != 0) + (var1 != 0)
10558 + (con0 != 0) + (con1 != 0)
10559 + (lit0 != 0) + (lit1 != 0)
10560 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10562 bool any_overflows = false;
10563 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10564 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10565 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10566 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10567 var0 = associate_trees (loc, var0, var1, code, atype);
10568 con0 = associate_trees (loc, con0, con1, code, atype);
10569 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10570 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10571 code, atype);
10573 /* Preserve the MINUS_EXPR if the negative part of the literal is
10574 greater than the positive part. Otherwise, the multiplicative
10575 folding code (i.e. extract_muldiv) may be fooled when
10576 unsigned constants are subtracted, as in the following
10577 example: ((X*2 + 4) - 8U)/2. */
10578 if (minus_lit0 && lit0)
10580 if (TREE_CODE (lit0) == INTEGER_CST
10581 && TREE_CODE (minus_lit0) == INTEGER_CST
10582 && tree_int_cst_lt (lit0, minus_lit0))
10584 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10585 MINUS_EXPR, atype);
10586 lit0 = 0;
10588 else
10590 lit0 = associate_trees (loc, lit0, minus_lit0,
10591 MINUS_EXPR, atype);
10592 minus_lit0 = 0;
10596 /* Don't introduce overflows through reassociation. */
10597 if (!any_overflows
10598 && ((lit0 && TREE_OVERFLOW (lit0))
10599 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10600 return NULL_TREE;
10602 if (minus_lit0)
10604 if (con0 == 0)
10605 return
10606 fold_convert_loc (loc, type,
10607 associate_trees (loc, var0, minus_lit0,
10608 MINUS_EXPR, atype));
10609 else
10611 con0 = associate_trees (loc, con0, minus_lit0,
10612 MINUS_EXPR, atype);
10613 return
10614 fold_convert_loc (loc, type,
10615 associate_trees (loc, var0, con0,
10616 PLUS_EXPR, atype));
10620 con0 = associate_trees (loc, con0, lit0, code, atype);
10621 return
10622 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10623 code, atype));
10627 return NULL_TREE;
10629 case MINUS_EXPR:
10630 /* Pointer simplifications for subtraction, simple reassociations. */
10631 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10633 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10634 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10635 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10637 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10638 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10639 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10640 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10641 return fold_build2_loc (loc, PLUS_EXPR, type,
10642 fold_build2_loc (loc, MINUS_EXPR, type,
10643 arg00, arg10),
10644 fold_build2_loc (loc, MINUS_EXPR, type,
10645 arg01, arg11));
10647 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10648 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10650 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10651 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10652 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10653 fold_convert_loc (loc, type, arg1));
10654 if (tmp)
10655 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10658 /* A - (-B) -> A + B */
10659 if (TREE_CODE (arg1) == NEGATE_EXPR)
10660 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10661 fold_convert_loc (loc, type,
10662 TREE_OPERAND (arg1, 0)));
10663 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10664 if (TREE_CODE (arg0) == NEGATE_EXPR
10665 && (FLOAT_TYPE_P (type)
10666 || INTEGRAL_TYPE_P (type))
10667 && negate_expr_p (arg1)
10668 && reorder_operands_p (arg0, arg1))
10669 return fold_build2_loc (loc, MINUS_EXPR, type,
10670 fold_convert_loc (loc, type,
10671 negate_expr (arg1)),
10672 fold_convert_loc (loc, type,
10673 TREE_OPERAND (arg0, 0)));
10674 /* Convert -A - 1 to ~A. */
10675 if (INTEGRAL_TYPE_P (type)
10676 && TREE_CODE (arg0) == NEGATE_EXPR
10677 && integer_onep (arg1)
10678 && !TYPE_OVERFLOW_TRAPS (type))
10679 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10680 fold_convert_loc (loc, type,
10681 TREE_OPERAND (arg0, 0)));
10683 /* Convert -1 - A to ~A. */
10684 if (INTEGRAL_TYPE_P (type)
10685 && integer_all_onesp (arg0))
10686 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
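/* (Both ~A folds above rest on the two's complement identity
   ~A == -A - 1; e.g. -1 - 5 == -6 == ~5.)  */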
10689 /* X - (X / CST) * CST is X % CST. */
10690 if (INTEGRAL_TYPE_P (type)
10691 && TREE_CODE (arg1) == MULT_EXPR
10692 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10693 && operand_equal_p (arg0,
10694 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10695 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10696 TREE_OPERAND (arg1, 1), 0))
10697 return
10698 fold_convert_loc (loc, type,
10699 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10700 arg0, TREE_OPERAND (arg1, 1)));
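/* For illustration: with X = 7 and CST = 3 this is
   7 - (7/3)*3 = 7 - 6 = 1 == 7 % 3.  Since TRUNC_DIV_EXPR and
   TRUNC_MOD_EXPR both truncate toward zero, the identity also holds
   for negative X: -7 - (-7/3)*3 = -7 + 6 = -1 == -7 % 3.  */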
10702 if (! FLOAT_TYPE_P (type))
10704 if (integer_zerop (arg0))
10705 return negate_expr (fold_convert_loc (loc, type, arg1));
10706 if (integer_zerop (arg1))
10707 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10709 /* Fold A - (A & B) into ~B & A. */
10710 if (!TREE_SIDE_EFFECTS (arg0)
10711 && TREE_CODE (arg1) == BIT_AND_EXPR)
10713 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10715 tree arg10 = fold_convert_loc (loc, type,
10716 TREE_OPERAND (arg1, 0));
10717 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10718 fold_build1_loc (loc, BIT_NOT_EXPR,
10719 type, arg10),
10720 fold_convert_loc (loc, type, arg0));
10722 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10724 tree arg11 = fold_convert_loc (loc,
10725 type, TREE_OPERAND (arg1, 1));
10726 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10727 fold_build1_loc (loc, BIT_NOT_EXPR,
10728 type, arg11),
10729 fold_convert_loc (loc, type, arg0));
10733 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10734 any power of 2 minus 1. */
10735 if (TREE_CODE (arg0) == BIT_AND_EXPR
10736 && TREE_CODE (arg1) == BIT_AND_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 0),
10738 TREE_OPERAND (arg1, 0), 0))
10740 tree mask0 = TREE_OPERAND (arg0, 1);
10741 tree mask1 = TREE_OPERAND (arg1, 1);
10742 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10744 if (operand_equal_p (tem, mask1, 0))
10746 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10747 TREE_OPERAND (arg0, 0), mask1);
10748 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
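/* For illustration, with B = 3 (a power of 2 minus 1) and A = 6:
   (A & ~B) - (A & B) = 4 - 2 = 2, and likewise
   (A ^ B) - B = 5 - 3 = 2.  */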
10753 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10754 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10755 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10757 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10758 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10759 (-ARG1 + ARG0) reduces to -ARG1. */
10760 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10761 return negate_expr (fold_convert_loc (loc, type, arg1));
10763 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10764 __complex__ ( x, -y ). This is not the same for SNaNs or if
10765 signed zeros are involved. */
10766 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10767 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10768 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10770 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10771 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10772 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10773 bool arg0rz = false, arg0iz = false;
10774 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10775 || (arg0i && (arg0iz = real_zerop (arg0i))))
10777 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10778 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10779 if (arg0rz && arg1i && real_zerop (arg1i))
10781 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10782 arg1r ? arg1r
10783 : build1 (REALPART_EXPR, rtype, arg1));
10784 tree ip = arg0i ? arg0i
10785 : build1 (IMAGPART_EXPR, rtype, arg0);
10786 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10788 else if (arg0iz && arg1r && real_zerop (arg1r))
10790 tree rp = arg0r ? arg0r
10791 : build1 (REALPART_EXPR, rtype, arg0);
10792 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10793 arg1i ? arg1i
10794 : build1 (IMAGPART_EXPR, rtype, arg1));
10795 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10800 /* Fold &x - &x. This can happen from &x.foo - &x.
10801 This is unsafe for certain floats even in non-IEEE formats.
10802 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10803 Also note that operand_equal_p is always false if an operand
10804 is volatile. */
10806 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10807 && operand_equal_p (arg0, arg1, 0))
10808 return build_zero_cst (type);
10810 /* A - B -> A + (-B) if B is easily negatable. */
10811 if (negate_expr_p (arg1)
10812 && ((FLOAT_TYPE_P (type)
10813 /* Avoid this transformation if B is a positive REAL_CST. */
10814 && (TREE_CODE (arg1) != REAL_CST
10815 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10816 || INTEGRAL_TYPE_P (type)))
10817 return fold_build2_loc (loc, PLUS_EXPR, type,
10818 fold_convert_loc (loc, type, arg0),
10819 fold_convert_loc (loc, type,
10820 negate_expr (arg1)));
10822 /* Try folding difference of addresses. */
10824 HOST_WIDE_INT diff;
10826 if ((TREE_CODE (arg0) == ADDR_EXPR
10827 || TREE_CODE (arg1) == ADDR_EXPR)
10828 && ptr_difference_const (arg0, arg1, &diff))
10829 return build_int_cst_type (type, diff);
10832 /* Fold &a[i] - &a[j] to i-j. */
10833 if (TREE_CODE (arg0) == ADDR_EXPR
10834 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10835 && TREE_CODE (arg1) == ADDR_EXPR
10836 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10838 tree tem = fold_addr_of_array_ref_difference (loc, type,
10839 TREE_OPERAND (arg0, 0),
10840 TREE_OPERAND (arg1, 0));
10841 if (tem)
10842 return tem;
10845 if (FLOAT_TYPE_P (type)
10846 && flag_unsafe_math_optimizations
10847 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10848 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10849 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10850 return tem;
10852 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10853 one. Make sure the type is not saturating and has the signedness of
10854 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10855 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10856 if ((TREE_CODE (arg0) == MULT_EXPR
10857 || TREE_CODE (arg1) == MULT_EXPR)
10858 && !TYPE_SATURATING (type)
10859 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10860 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10861 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10863 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10864 if (tem)
10865 return tem;
10868 goto associate;
10870 case MULT_EXPR:
10871 /* (-A) * (-B) -> A * B */
10872 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10873 return fold_build2_loc (loc, MULT_EXPR, type,
10874 fold_convert_loc (loc, type,
10875 TREE_OPERAND (arg0, 0)),
10876 fold_convert_loc (loc, type,
10877 negate_expr (arg1)));
10878 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10879 return fold_build2_loc (loc, MULT_EXPR, type,
10880 fold_convert_loc (loc, type,
10881 negate_expr (arg0)),
10882 fold_convert_loc (loc, type,
10883 TREE_OPERAND (arg1, 0)));
10885 if (! FLOAT_TYPE_P (type))
10887 if (integer_zerop (arg1))
10888 return omit_one_operand_loc (loc, type, arg1, arg0);
10889 if (integer_onep (arg1))
10890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10891 /* Transform x * -1 into -x. Make sure to do the negation
10892 on the original operand with conversions not stripped
10893 because we can only strip non-sign-changing conversions. */
10894 if (integer_minus_onep (arg1))
10895 return fold_convert_loc (loc, type, negate_expr (op0));
10896 /* Transform x * -C into -x * C if x is easily negatable. */
10897 if (TREE_CODE (arg1) == INTEGER_CST
10898 && tree_int_cst_sgn (arg1) == -1
10899 && negate_expr_p (arg0)
10900 && (tem = negate_expr (arg1)) != arg1
10901 && !TREE_OVERFLOW (tem))
10902 return fold_build2_loc (loc, MULT_EXPR, type,
10903 fold_convert_loc (loc, type,
10904 negate_expr (arg0)),
10905 tem);
10907 /* (a * (1 << b)) is (a << b) */
10908 if (TREE_CODE (arg1) == LSHIFT_EXPR
10909 && integer_onep (TREE_OPERAND (arg1, 0)))
10910 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10911 TREE_OPERAND (arg1, 1));
10912 if (TREE_CODE (arg0) == LSHIFT_EXPR
10913 && integer_onep (TREE_OPERAND (arg0, 0)))
10914 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10915 TREE_OPERAND (arg0, 1));
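/* E.g. a * (1 << 3) is a * 8, i.e. a << 3; the second form above
   handles the multiplication with its operands swapped.  */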
10917 /* (A + A) * C -> A * 2 * C */
10918 if (TREE_CODE (arg0) == PLUS_EXPR
10919 && TREE_CODE (arg1) == INTEGER_CST
10920 && operand_equal_p (TREE_OPERAND (arg0, 0),
10921 TREE_OPERAND (arg0, 1), 0))
10922 return fold_build2_loc (loc, MULT_EXPR, type,
10923 omit_one_operand_loc (loc, type,
10924 TREE_OPERAND (arg0, 0),
10925 TREE_OPERAND (arg0, 1)),
10926 fold_build2_loc (loc, MULT_EXPR, type,
10927 build_int_cst (type, 2), arg1));
10929 strict_overflow_p = false;
10930 if (TREE_CODE (arg1) == INTEGER_CST
10931 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10932 &strict_overflow_p)))
10934 if (strict_overflow_p)
10935 fold_overflow_warning (("assuming signed overflow does not "
10936 "occur when simplifying "
10937 "multiplication"),
10938 WARN_STRICT_OVERFLOW_MISC);
10939 return fold_convert_loc (loc, type, tem);
10942 /* Optimize z * conj(z) for integer complex numbers. */
10943 if (TREE_CODE (arg0) == CONJ_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10945 return fold_mult_zconjz (loc, type, arg1);
10946 if (TREE_CODE (arg1) == CONJ_EXPR
10947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10948 return fold_mult_zconjz (loc, type, arg0);
10950 else
10952 /* Maybe fold x * 0 to 0. The expressions aren't the same
10953 when x is NaN, since x * 0 is also NaN. Nor are they the
10954 same in modes with signed zeros, since multiplying a
10955 negative value by 0 gives -0, not +0. */
10956 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10957 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10958 && real_zerop (arg1))
10959 return omit_one_operand_loc (loc, type, arg1, arg0);
10960 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10961 Likewise for complex arithmetic with signed zeros. */
10962 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10963 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10964 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10965 && real_onep (arg1))
10966 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10968 /* Transform x * -1.0 into -x. */
10969 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10970 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10971 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10972 && real_minus_onep (arg1))
10973 return fold_convert_loc (loc, type, negate_expr (arg0));
10975 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10976 the result for floating-point types due to rounding, so it is
10977 applied only if -fassociative-math was specified.  */
10978 if (flag_associative_math
10979 && TREE_CODE (arg0) == RDIV_EXPR
10980 && TREE_CODE (arg1) == REAL_CST
10981 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10983 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10984 arg1);
10985 if (tem)
10986 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10987 TREE_OPERAND (arg0, 1));
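/* E.g. (6.0/X)*0.5 becomes 3.0/X; the two forms can round
   differently for less convenient constants, hence the
   flag_associative_math guard.  */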
10990 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10991 if (operand_equal_p (arg0, arg1, 0))
10993 tree tem = fold_strip_sign_ops (arg0);
10994 if (tem != NULL_TREE)
10996 tem = fold_convert_loc (loc, type, tem);
10997 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11001 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11002 This is not the same for NaNs or if signed zeros are
11003 involved. */
11004 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11005 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11006 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11007 && TREE_CODE (arg1) == COMPLEX_CST
11008 && real_zerop (TREE_REALPART (arg1)))
11010 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11011 if (real_onep (TREE_IMAGPART (arg1)))
11012 return
11013 fold_build2_loc (loc, COMPLEX_EXPR, type,
11014 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11015 rtype, arg0)),
11016 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11017 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11018 return
11019 fold_build2_loc (loc, COMPLEX_EXPR, type,
11020 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11021 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11022 rtype, arg0)));
11025 /* Optimize z * conj(z) for floating point complex numbers.
11026 Guarded by flag_unsafe_math_optimizations as non-finite
11027 imaginary components don't produce scalar results. */
11028 if (flag_unsafe_math_optimizations
11029 && TREE_CODE (arg0) == CONJ_EXPR
11030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11031 return fold_mult_zconjz (loc, type, arg1);
11032 if (flag_unsafe_math_optimizations
11033 && TREE_CODE (arg1) == CONJ_EXPR
11034 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11035 return fold_mult_zconjz (loc, type, arg0);
11037 if (flag_unsafe_math_optimizations)
11039 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11040 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11042 /* Optimizations of root(...)*root(...). */
11043 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11045 tree rootfn, arg;
11046 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11047 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11049 /* Optimize sqrt(x)*sqrt(x) as x. */
11050 if (BUILTIN_SQRT_P (fcode0)
11051 && operand_equal_p (arg00, arg10, 0)
11052 && ! HONOR_SNANS (TYPE_MODE (type)))
11053 return arg00;
11055 /* Optimize root(x)*root(y) as root(x*y). */
11056 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11057 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11058 return build_call_expr_loc (loc, rootfn, 1, arg);
11061 /* Optimize expN(x)*expN(y) as expN(x+y). */
11062 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11064 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11065 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11066 CALL_EXPR_ARG (arg0, 0),
11067 CALL_EXPR_ARG (arg1, 0));
11068 return build_call_expr_loc (loc, expfn, 1, arg);
11071 /* Optimizations of pow(...)*pow(...). */
11072 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11073 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11074 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11076 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11077 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11078 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11079 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11081 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11082 if (operand_equal_p (arg01, arg11, 0))
11084 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11085 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11086 arg00, arg10);
11087 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11090 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11091 if (operand_equal_p (arg00, arg10, 0))
11093 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11094 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11095 arg01, arg11);
11096 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
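/* E.g. pow(x,2.0)*pow(x,3.0) becomes pow(x,5.0); the folded call
   can round differently from the original product, which is why
   these folds live under flag_unsafe_math_optimizations.  */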
11100 /* Optimize tan(x)*cos(x) as sin(x). */
11101 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11102 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11103 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11104 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11105 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11106 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11107 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11108 CALL_EXPR_ARG (arg1, 0), 0))
11110 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11112 if (sinfn != NULL_TREE)
11113 return build_call_expr_loc (loc, sinfn, 1,
11114 CALL_EXPR_ARG (arg0, 0));
11117 /* Optimize x*pow(x,c) as pow(x,c+1). */
11118 if (fcode1 == BUILT_IN_POW
11119 || fcode1 == BUILT_IN_POWF
11120 || fcode1 == BUILT_IN_POWL)
11122 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11123 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11124 if (TREE_CODE (arg11) == REAL_CST
11125 && !TREE_OVERFLOW (arg11)
11126 && operand_equal_p (arg0, arg10, 0))
11128 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11129 REAL_VALUE_TYPE c;
11130 tree arg;
11132 c = TREE_REAL_CST (arg11);
11133 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11134 arg = build_real (type, c);
11135 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11139 /* Optimize pow(x,c)*x as pow(x,c+1). */
11140 if (fcode0 == BUILT_IN_POW
11141 || fcode0 == BUILT_IN_POWF
11142 || fcode0 == BUILT_IN_POWL)
11144 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11145 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11146 if (TREE_CODE (arg01) == REAL_CST
11147 && !TREE_OVERFLOW (arg01)
11148 && operand_equal_p (arg1, arg00, 0))
11150 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11151 REAL_VALUE_TYPE c;
11152 tree arg;
11154 c = TREE_REAL_CST (arg01);
11155 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11156 arg = build_real (type, c);
11157 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11161 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11162 if (!in_gimple_form
11163 && optimize
11164 && operand_equal_p (arg0, arg1, 0))
11166 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11168 if (powfn)
11170 tree arg = build_real (type, dconst2);
11171 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11176 goto associate;
11178 case BIT_IOR_EXPR:
11179 bit_ior:
11180 if (integer_all_onesp (arg1))
11181 return omit_one_operand_loc (loc, type, arg1, arg0);
11182 if (integer_zerop (arg1))
11183 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11184 if (operand_equal_p (arg0, arg1, 0))
11185 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11187 /* ~X | X is -1. */
11188 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11191 t1 = build_zero_cst (type);
11192 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11193 return omit_one_operand_loc (loc, type, t1, arg1);
11196 /* X | ~X is -1. */
11197 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11198 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11200 t1 = build_zero_cst (type);
11201 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11202 return omit_one_operand_loc (loc, type, t1, arg0);
11205 /* Canonicalize (X & C1) | C2. */
11206 if (TREE_CODE (arg0) == BIT_AND_EXPR
11207 && TREE_CODE (arg1) == INTEGER_CST
11208 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11210 double_int c1, c2, c3, msk;
11211 int width = TYPE_PRECISION (type), w;
11212 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11213 c2 = tree_to_double_int (arg1);
11215 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11216 if ((c1 & c2) == c1)
11217 return omit_one_operand_loc (loc, type, arg1,
11218 TREE_OPERAND (arg0, 0));
11220 msk = double_int::mask (width);
11222 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11223 if (msk.and_not (c1 | c2).is_zero ())
11224 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11225 TREE_OPERAND (arg0, 0), arg1);
11227 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11228 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11229 mode which allows further optimizations. */
11230 c1 &= msk;
11231 c2 &= msk;
11232 c3 = c1.and_not (c2);
11233 for (w = BITS_PER_UNIT;
11234 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11235 w <<= 1)
11237 unsigned HOST_WIDE_INT mask
11238 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11239 if (((c1.low | c2.low) & mask) == mask
11240 && (c1.low & ~mask) == 0 && c1.high == 0)
11242 c3 = double_int::from_uhwi (mask);
11243 break;
11246 if (c3 != c1)
11247 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11248 fold_build2_loc (loc, BIT_AND_EXPR, type,
11249 TREE_OPERAND (arg0, 0),
11250 double_int_to_tree (type,
11251 c3)),
11252 arg1);
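/* For illustration: (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f,
   since C1 & ~C2 == 0x30 and no mask of the form 2**w - 1 covers
   C1; by contrast (X & 0xff) | 0x0f is left alone, because
   C1 == 0xff is already the QImode mask found by the loop above.  */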
11255 /* (X & Y) | Y is (X, Y). */
11256 if (TREE_CODE (arg0) == BIT_AND_EXPR
11257 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11258 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11259 /* (X & Y) | X is (Y, X). */
11260 if (TREE_CODE (arg0) == BIT_AND_EXPR
11261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11262 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11263 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11264 /* X | (X & Y) is (Y, X). */
11265 if (TREE_CODE (arg1) == BIT_AND_EXPR
11266 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11267 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11268 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11269 /* X | (Y & X) is (Y, X). */
11270 if (TREE_CODE (arg1) == BIT_AND_EXPR
11271 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11272 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11273 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11275 /* (X & ~Y) | (~X & Y) is X ^ Y */
11276 if (TREE_CODE (arg0) == BIT_AND_EXPR
11277 && TREE_CODE (arg1) == BIT_AND_EXPR)
11279 tree a0, a1, l0, l1, n0, n1;
11281 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11282 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11284 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11285 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11287 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11288 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11290 if ((operand_equal_p (n0, a0, 0)
11291 && operand_equal_p (n1, a1, 0))
11292 || (operand_equal_p (n0, a1, 0)
11293 && operand_equal_p (n1, a0, 0)))
11294 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
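/* (This is just the disjunctive normal form of XOR, matched operand
   by operand; e.g. for single bits x = 1, y = 0:
   (1 & ~0) | (~1 & 0) == 1 == 1 ^ 0.)  */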
11297 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11298 if (t1 != NULL_TREE)
11299 return t1;
11301 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11303 This results in more efficient code for machines without a NAND
11304 instruction. Combine will canonicalize to the first form
11305 which will allow use of NAND instructions provided by the
11306 backend if they exist. */
11307 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11308 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11310 return
11311 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11312 build2 (BIT_AND_EXPR, type,
11313 fold_convert_loc (loc, type,
11314 TREE_OPERAND (arg0, 0)),
11315 fold_convert_loc (loc, type,
11316 TREE_OPERAND (arg1, 0))));
11319 /* See if this can be simplified into a rotate first. If that
11320 is unsuccessful continue in the association code. */
11321 goto bit_rotate;
11323 case BIT_XOR_EXPR:
11324 if (integer_zerop (arg1))
11325 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11326 if (integer_all_onesp (arg1))
11327 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11328 if (operand_equal_p (arg0, arg1, 0))
11329 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11331 /* ~X ^ X is -1. */
11332 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11335 t1 = build_zero_cst (type);
11336 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11337 return omit_one_operand_loc (loc, type, t1, arg1);
11340 /* X ^ ~X is -1. */
11341 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11342 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11344 t1 = build_zero_cst (type);
11345 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11346 return omit_one_operand_loc (loc, type, t1, arg0);
11349 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11350 with a constant, and the two constants have no bits in common,
11351 we should treat this as a BIT_IOR_EXPR since this may produce more
11352 simplifications. */
11353 if (TREE_CODE (arg0) == BIT_AND_EXPR
11354 && TREE_CODE (arg1) == BIT_AND_EXPR
11355 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11356 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11357 && integer_zerop (const_binop (BIT_AND_EXPR,
11358 TREE_OPERAND (arg0, 1),
11359 TREE_OPERAND (arg1, 1))))
11361 code = BIT_IOR_EXPR;
11362 goto bit_ior;
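/* E.g. (X & 0x0f) ^ (Y & 0xf0): the masked values can have no set
   bit in common, so a ^ b == a | b here, and the IOR form is the
   one the simplifications above know how to handle.  */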
11365 /* (X | Y) ^ X -> Y & ~X.  */
11366 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11367 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11369 tree t2 = TREE_OPERAND (arg0, 1);
11370 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11371 arg1);
11372 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11373 fold_convert_loc (loc, type, t2),
11374 fold_convert_loc (loc, type, t1));
11375 return t1;
11378 /* (Y | X) ^ X -> Y & ~X.  */
11379 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11380 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11382 tree t2 = TREE_OPERAND (arg0, 0);
11383 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11384 arg1);
11385 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11386 fold_convert_loc (loc, type, t2),
11387 fold_convert_loc (loc, type, t1));
11388 return t1;
11391 /* X ^ (X | Y) -> Y & ~X.  */
11392 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11393 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11395 tree t2 = TREE_OPERAND (arg1, 1);
11396 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11397 arg0);
11398 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11399 fold_convert_loc (loc, type, t2),
11400 fold_convert_loc (loc, type, t1));
11401 return t1;
11404 /* X ^ (Y | X) -> Y & ~X.  */
11405 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11406 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11408 tree t2 = TREE_OPERAND (arg1, 0);
11409 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11410 arg0);
11411 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11412 fold_convert_loc (loc, type, t2),
11413 fold_convert_loc (loc, type, t1));
11414 return t1;
11417 /* Convert ~X ^ ~Y to X ^ Y. */
11418 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11419 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11420 return fold_build2_loc (loc, code, type,
11421 fold_convert_loc (loc, type,
11422 TREE_OPERAND (arg0, 0)),
11423 fold_convert_loc (loc, type,
11424 TREE_OPERAND (arg1, 0)));
11426 /* Convert ~X ^ C to X ^ ~C. */
11427 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11428 && TREE_CODE (arg1) == INTEGER_CST)
11429 return fold_build2_loc (loc, code, type,
11430 fold_convert_loc (loc, type,
11431 TREE_OPERAND (arg0, 0)),
11432 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11434 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11435 if (TREE_CODE (arg0) == BIT_AND_EXPR
11436 && integer_onep (TREE_OPERAND (arg0, 1))
11437 && integer_onep (arg1))
11438 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11439 build_zero_cst (TREE_TYPE (arg0)));
11441 /* Fold (X & Y) ^ Y as ~X & Y. */
11442 if (TREE_CODE (arg0) == BIT_AND_EXPR
11443 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11445 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11446 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11447 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11448 fold_convert_loc (loc, type, arg1));
11450 /* Fold (X & Y) ^ X as ~Y & X. */
11451 if (TREE_CODE (arg0) == BIT_AND_EXPR
11452 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11453 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11455 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11456 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11457 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11458 fold_convert_loc (loc, type, arg1));
11460 /* Fold X ^ (X & Y) as X & ~Y. */
11461 if (TREE_CODE (arg1) == BIT_AND_EXPR
11462 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11464 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11465 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11466 fold_convert_loc (loc, type, arg0),
11467 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11469 /* Fold X ^ (Y & X) as ~Y & X. */
11470 if (TREE_CODE (arg1) == BIT_AND_EXPR
11471 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11472 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11474 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11475 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11476 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11477 fold_convert_loc (loc, type, arg0));
11480 /* See if this can be simplified into a rotate first. If that
11481 is unsuccessful continue in the association code. */
11482 goto bit_rotate;
11484 case BIT_AND_EXPR:
11485 if (integer_all_onesp (arg1))
11486 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11487 if (integer_zerop (arg1))
11488 return omit_one_operand_loc (loc, type, arg1, arg0);
11489 if (operand_equal_p (arg0, arg1, 0))
11490 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11492 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11493 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11494 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11495 || (TREE_CODE (arg0) == EQ_EXPR
11496 && integer_zerop (TREE_OPERAND (arg0, 1))))
11497 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11498 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11500 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11501 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11502 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11503 || (TREE_CODE (arg1) == EQ_EXPR
11504 && integer_zerop (TREE_OPERAND (arg1, 1))))
11505 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11506 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11508 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11509 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11510 && TREE_CODE (arg1) == INTEGER_CST
11511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11513 tree tmp1 = fold_convert_loc (loc, type, arg1);
11514 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11515 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11516 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11517 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11518 return
11519 fold_convert_loc (loc, type,
11520 fold_build2_loc (loc, BIT_IOR_EXPR,
11521 type, tmp2, tmp3));
11524 /* (X | Y) & Y is (X, Y). */
11525 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11526 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11527 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11528 /* (X | Y) & X is (Y, X). */
11529 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11530 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11531 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11532 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11533 /* X & (X | Y) is (Y, X). */
11534 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11536 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11537 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11538 /* X & (Y | X) is (Y, X). */
11539 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11541 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11542 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11544 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11545 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11546 && integer_onep (TREE_OPERAND (arg0, 1))
11547 && integer_onep (arg1))
11549 tree tem2;
11550 tem = TREE_OPERAND (arg0, 0);
11551 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11552 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11553 tem, tem2);
11554 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11555 build_zero_cst (TREE_TYPE (tem)));
11557 /* Fold ~X & 1 as (X & 1) == 0. */
11558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11559 && integer_onep (arg1))
11561 tree tem2;
11562 tem = TREE_OPERAND (arg0, 0);
11563 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11564 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11565 tem, tem2);
11566 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11567 build_zero_cst (TREE_TYPE (tem)));
11569 /* Fold !X & 1 as X == 0. */
11570 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11571 && integer_onep (arg1))
11573 tem = TREE_OPERAND (arg0, 0);
11574 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11575 build_zero_cst (TREE_TYPE (tem)));
11578 /* Fold (X ^ Y) & Y as ~X & Y. */
11579 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11582 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11583 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11584 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11585 fold_convert_loc (loc, type, arg1));
11587 /* Fold (X ^ Y) & X as ~Y & X. */
11588 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11589 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11590 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11592 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11593 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11594 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11595 fold_convert_loc (loc, type, arg1));
11597 /* Fold X & (X ^ Y) as X & ~Y. */
11598 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11599 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11601 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11602 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11603 fold_convert_loc (loc, type, arg0),
11604 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11606 /* Fold X & (Y ^ X) as ~Y & X. */
11607 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11609 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11611 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11612 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11613 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11614 fold_convert_loc (loc, type, arg0));
11617 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11618 multiple of 1 << CST. */
11619 if (TREE_CODE (arg1) == INTEGER_CST)
11621 double_int cst1 = tree_to_double_int (arg1);
11622 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11623 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11624 if ((cst1 & ncst1) == ncst1
11625 && multiple_of_p (type, arg0,
11626 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11627 return fold_convert_loc (loc, type, arg0);
11630 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11631 bits from CST2. */
11632 if (TREE_CODE (arg1) == INTEGER_CST
11633 && TREE_CODE (arg0) == MULT_EXPR
11634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11636 int arg1tz
11637 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11638 if (arg1tz > 0)
11640 double_int arg1mask, masked;
11641 arg1mask = ~double_int::mask (arg1tz);
11642 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11643 TYPE_UNSIGNED (type));
11644 masked = arg1mask & tree_to_double_int (arg1);
11645 if (masked.is_zero ())
11646 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11647 arg0, arg1);
11648 else if (masked != tree_to_double_int (arg1))
11649 return fold_build2_loc (loc, code, type, op0,
11650 double_int_to_tree (type, masked));
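/* For illustration: in (X * 4) & 7 the product has two known-zero
   low bits, so the mask shrinks and the expression becomes
   (X * 4) & 4, while (X * 4) & 3 folds to 0 outright.  */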
11654 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11655 ((A & N) + B) & M -> (A + B) & M
11656 Similarly if (N & M) == 0,
11657 ((A | N) + B) & M -> (A + B) & M
11658 and for - instead of + (or unary - instead of +)
11659 and/or ^ instead of |.
11660 If B is constant and (B & M) == 0, fold into A & M. */
11661 if (host_integerp (arg1, 1))
11663 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11664 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11665 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11666 && (TREE_CODE (arg0) == PLUS_EXPR
11667 || TREE_CODE (arg0) == MINUS_EXPR
11668 || TREE_CODE (arg0) == NEGATE_EXPR)
11669 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11670 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11672 tree pmop[2];
11673 int which = 0;
11674 unsigned HOST_WIDE_INT cst0;
11676 /* Now we know that arg0 is (C + D) or (C - D) or
11677 -C and arg1 (M) is (1LL << cst) - 1.
11678 Store C into PMOP[0] and D into PMOP[1]. */
11679 pmop[0] = TREE_OPERAND (arg0, 0);
11680 pmop[1] = NULL;
11681 if (TREE_CODE (arg0) != NEGATE_EXPR)
11683 pmop[1] = TREE_OPERAND (arg0, 1);
11684 which = 1;
11687 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11688 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11689 & cst1) != cst1)
11690 which = -1;
11692 for (; which >= 0; which--)
11693 switch (TREE_CODE (pmop[which]))
11695 case BIT_AND_EXPR:
11696 case BIT_IOR_EXPR:
11697 case BIT_XOR_EXPR:
11698 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11699 != INTEGER_CST)
11700 break;
11701 /* tree_low_cst not used, because we don't care about
11702 the upper bits. */
11703 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11704 cst0 &= cst1;
11705 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11707 if (cst0 != cst1)
11708 break;
11710 else if (cst0 != 0)
11711 break;
11712 /* If C or D is of the form (A & N) where
11713 (N & M) == M, or of the form (A | N) or
11714 (A ^ N) where (N & M) == 0, replace it with A. */
11715 pmop[which] = TREE_OPERAND (pmop[which], 0);
11716 break;
11717 case INTEGER_CST:
11718 /* If C or D is a constant N where (N & M) == 0, it can be
11719 omitted (assumed 0). */
11720 if ((TREE_CODE (arg0) == PLUS_EXPR
11721 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11722 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11723 pmop[which] = NULL;
11724 break;
11725 default:
11726 break;
11729 /* Only build anything new if we optimized one or both arguments
11730 above. */
11731 if (pmop[0] != TREE_OPERAND (arg0, 0)
11732 || (TREE_CODE (arg0) != NEGATE_EXPR
11733 && pmop[1] != TREE_OPERAND (arg0, 1)))
11735 tree utype = TREE_TYPE (arg0);
11736 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11738 /* Perform the operations in a type that has defined
11739 overflow behavior. */
11740 utype = unsigned_type_for (TREE_TYPE (arg0));
11741 if (pmop[0] != NULL)
11742 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11743 if (pmop[1] != NULL)
11744 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11747 if (TREE_CODE (arg0) == NEGATE_EXPR)
11748 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11749 else if (TREE_CODE (arg0) == PLUS_EXPR)
11751 if (pmop[0] != NULL && pmop[1] != NULL)
11752 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11753 pmop[0], pmop[1]);
11754 else if (pmop[0] != NULL)
11755 tem = pmop[0];
11756 else if (pmop[1] != NULL)
11757 tem = pmop[1];
11758 else
11759 return build_int_cst (type, 0);
11761 else if (pmop[0] == NULL)
11762 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11763 else
11764 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11765 pmop[0], pmop[1]);
11766 /* TEM is now the new binary +, - or unary - replacement. */
11767 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11768 fold_convert_loc (loc, utype, arg1));
11769 return fold_convert_loc (loc, type, tem);
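/* For illustration, with M = 0xff (cst == 8):
   ((A & 0x1ff) + B) & 0xff becomes (A + B) & 0xff since
   (N & M) == M, and ((A | 0x100) + B) & 0xff becomes
   (A + B) & 0xff since (N & M) == 0; carries only propagate
   upward, so bits at or above bit 8 cannot disturb the
   surviving low eight bits of the sum.  */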
11774 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11775 if (t1 != NULL_TREE)
11776 return t1;
11777 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11778 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11779 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11781 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11783 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11784 && (~TREE_INT_CST_LOW (arg1)
11785 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11786 return
11787 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11790 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11792 This results in more efficient code for machines without a NOR
11793 instruction. Combine will canonicalize to the first form
11794 which will allow use of NOR instructions provided by the
11795 backend if they exist. */
11796 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11797 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11799 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11800 build2 (BIT_IOR_EXPR, type,
11801 fold_convert_loc (loc, type,
11802 TREE_OPERAND (arg0, 0)),
11803 fold_convert_loc (loc, type,
11804 TREE_OPERAND (arg1, 0))));
11807 /* If arg0 is derived from the address of an object or function, we may
11808 be able to fold this expression using the object or function's
11809 alignment. */
11810 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11812 unsigned HOST_WIDE_INT modulus, residue;
11813 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11815 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11816 integer_onep (arg1));
11818 /* This works because modulus is a power of 2. If this weren't the
11819 case, we'd have to replace it by its greatest power-of-2
11820 divisor: modulus & -modulus. */
11821 if (low < modulus)
11822 return build_int_cst (type, residue & low);
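/* For illustration: if arg0 is known to be 8-byte aligned
   (modulus 8, residue 0), then arg0 & 7 and arg0 & 3 both fold
   to 0; with residue 2 the latter would fold to 2 instead.  */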
11825 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11826 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11827 if the new mask might be further optimized. */
11828 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11829 || TREE_CODE (arg0) == RSHIFT_EXPR)
11830 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11831 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11832 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11833 < TYPE_PRECISION (TREE_TYPE (arg0))
11834 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11835 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11837 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11838 unsigned HOST_WIDE_INT mask
11839 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11840 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11841 tree shift_type = TREE_TYPE (arg0);
11843 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11844 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11845 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11846 && TYPE_PRECISION (TREE_TYPE (arg0))
11847 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11849 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11850 tree arg00 = TREE_OPERAND (arg0, 0);
11851 /* See if more bits can be proven to be zero because of
11852 zero extension. */
11853 if (TREE_CODE (arg00) == NOP_EXPR
11854 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11856 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11857 if (TYPE_PRECISION (inner_type)
11858 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11859 && TYPE_PRECISION (inner_type) < prec)
11861 prec = TYPE_PRECISION (inner_type);
11862 /* See if we can shorten the right shift. */
11863 if (shiftc < prec)
11864 shift_type = inner_type;
11867 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11868 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11869 zerobits <<= prec - shiftc;
11870 /* For an arithmetic shift, if the sign bit could be set, zerobits
11871 may actually contain sign bits, so no transformation is
11872 possible, unless MASK masks them all away. In that
11873 case the shift needs to be converted into a logical shift.  */
11874 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11875 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11877 if ((mask & zerobits) == 0)
11878 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11879 else
11880 zerobits = 0;
11884 /* ((X << 16) & 0xff00) is (X, 0). */
11885 if ((mask & zerobits) == mask)
11886 return omit_one_operand_loc (loc, type,
11887 build_int_cst (type, 0), arg0);
11889 newmask = mask | zerobits;
11890 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11892 /* Only do the transformation if NEWMASK is some integer
11893 mode's mask. */
11894 for (prec = BITS_PER_UNIT;
11895 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11896 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11897 break;
11898 if (prec < HOST_BITS_PER_WIDE_INT
11899 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11901 tree newmaskt;
11903 if (shift_type != TREE_TYPE (arg0))
11905 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11906 fold_convert_loc (loc, shift_type,
11907 TREE_OPERAND (arg0, 0)),
11908 TREE_OPERAND (arg0, 1));
11909 tem = fold_convert_loc (loc, type, tem);
11911 else
11912 tem = op0;
11913 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11914 if (!tree_int_cst_equal (newmaskt, arg1))
11915 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11920 goto associate;
11922 case RDIV_EXPR:
11923 /* Don't touch a floating-point divide by zero unless the mode
11924 of the constant can represent infinity. */
11925 if (TREE_CODE (arg1) == REAL_CST
11926 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11927 && real_zerop (arg1))
11928 return NULL_TREE;
11930 /* Optimize A / A to 1.0 if we don't care about
11931 NaNs or Infinities. Skip the transformation
11932 for non-real operands. */
11933 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11934 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11935 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11936 && operand_equal_p (arg0, arg1, 0))
11938 tree r = build_real (TREE_TYPE (arg0), dconst1);
11940 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11943 /* The complex version of the above A / A optimization. */
11944 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11945 && operand_equal_p (arg0, arg1, 0))
11947 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11948 if (! HONOR_NANS (TYPE_MODE (elem_type))
11949 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11951 tree r = build_real (elem_type, dconst1);
11952 /* omit_two_operands will call fold_convert for us. */
11953 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11957 /* (-A) / (-B) -> A / B */
11958 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11959 return fold_build2_loc (loc, RDIV_EXPR, type,
11960 TREE_OPERAND (arg0, 0),
11961 negate_expr (arg1));
11962 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11963 return fold_build2_loc (loc, RDIV_EXPR, type,
11964 negate_expr (arg0),
11965 TREE_OPERAND (arg1, 0));
11967 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11968 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11969 && real_onep (arg1))
11970 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11972 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11973 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11974 && real_minus_onep (arg1))
11975 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11976 negate_expr (arg0)));
11978 /* If ARG1 is a constant, we can convert this to a multiply by the
11979 reciprocal. This does not have the same rounding properties,
11980 so only do this if -freciprocal-math. We can actually
11981 always safely do it if ARG1 is a power of two, but it's hard to
11982 tell if it is or not in a portable manner. */
11983 if (optimize
11984 && (TREE_CODE (arg1) == REAL_CST
11985 || (TREE_CODE (arg1) == COMPLEX_CST
11986 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11987 || (TREE_CODE (arg1) == VECTOR_CST
11988 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11990 if (flag_reciprocal_math
11991 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11992 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11993 /* Find the reciprocal if optimizing and the result is exact.
11994 TODO: Complex reciprocal not implemented. */
11995 if (TREE_CODE (arg1) != COMPLEX_CST)
11997 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11999 if (inverse)
12000 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
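/* E.g. X / 4.0 becomes X * 0.25, which is exact and therefore done
   whenever the reciprocal is exactly representable; X / 3.0
   becomes X * (1.0/3.0) only under flag_reciprocal_math, since
   that product can round differently from the division.  */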
12003 /* Convert A/B/C to A/(B*C). */
12004 if (flag_reciprocal_math
12005 && TREE_CODE (arg0) == RDIV_EXPR)
12006 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12007 fold_build2_loc (loc, MULT_EXPR, type,
12008 TREE_OPERAND (arg0, 1), arg1));
12010 /* Convert A/(B/C) to (A/B)*C. */
12011 if (flag_reciprocal_math
12012 && TREE_CODE (arg1) == RDIV_EXPR)
12013 return fold_build2_loc (loc, MULT_EXPR, type,
12014 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12015 TREE_OPERAND (arg1, 0)),
12016 TREE_OPERAND (arg1, 1));
12018 /* Convert C1/(X*C2) into (C1/C2)/X. */
12019 if (flag_reciprocal_math
12020 && TREE_CODE (arg1) == MULT_EXPR
12021 && TREE_CODE (arg0) == REAL_CST
12022 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12024 tree tem = const_binop (RDIV_EXPR, arg0,
12025 TREE_OPERAND (arg1, 1));
12026 if (tem)
12027 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12028 TREE_OPERAND (arg1, 0));
12031 if (flag_unsafe_math_optimizations)
12033 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12034 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12036 /* Optimize sin(x)/cos(x) as tan(x). */
12037 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12038 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12039 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12040 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12041 CALL_EXPR_ARG (arg1, 0), 0))
12043 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12045 if (tanfn != NULL_TREE)
12046 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12049 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12050 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12051 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12052 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12053 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12054 CALL_EXPR_ARG (arg1, 0), 0))
12056 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12058 if (tanfn != NULL_TREE)
12060 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12061 CALL_EXPR_ARG (arg0, 0));
12062 return fold_build2_loc (loc, RDIV_EXPR, type,
12063 build_real (type, dconst1), tmp);
12067 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12068 NaNs or Infinities. */
12069 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12070 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12071 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12073 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12074 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12076 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12077 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12078 && operand_equal_p (arg00, arg01, 0))
12080 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12082 if (cosfn != NULL_TREE)
12083 return build_call_expr_loc (loc, cosfn, 1, arg00);
12087 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12088 NaNs or Infinities. */
12089 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12090 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12091 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12093 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12094 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12096 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12097 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12098 && operand_equal_p (arg00, arg01, 0))
12100 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12102 if (cosfn != NULL_TREE)
12104 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12105 return fold_build2_loc (loc, RDIV_EXPR, type,
12106 build_real (type, dconst1),
12107 tmp);
12112 /* Optimize pow(x,c)/x as pow(x,c-1). */
12113 if (fcode0 == BUILT_IN_POW
12114 || fcode0 == BUILT_IN_POWF
12115 || fcode0 == BUILT_IN_POWL)
12117 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12118 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12119 if (TREE_CODE (arg01) == REAL_CST
12120 && !TREE_OVERFLOW (arg01)
12121 && operand_equal_p (arg1, arg00, 0))
12123 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12124 REAL_VALUE_TYPE c;
12125 tree arg;
12127 c = TREE_REAL_CST (arg01);
12128 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12129 arg = build_real (type, c);
12130 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12134 /* Optimize a/root(b/c) into a*root(c/b). */
12135 if (BUILTIN_ROOT_P (fcode1))
12137 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12139 if (TREE_CODE (rootarg) == RDIV_EXPR)
12141 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12142 tree b = TREE_OPERAND (rootarg, 0);
12143 tree c = TREE_OPERAND (rootarg, 1);
12145 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12147 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12148 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12152 /* Optimize x/expN(y) into x*expN(-y). */
12153 if (BUILTIN_EXPONENT_P (fcode1))
12155 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12156 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12157 arg1 = build_call_expr_loc (loc,
12158 expfn, 1,
12159 fold_convert_loc (loc, type, arg));
12160 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12163 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12164 if (fcode1 == BUILT_IN_POW
12165 || fcode1 == BUILT_IN_POWF
12166 || fcode1 == BUILT_IN_POWL)
12168 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12169 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12170 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12171 tree neg11 = fold_convert_loc (loc, type,
12172 negate_expr (arg11));
12173 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12174 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12177 return NULL_TREE;
12179 case TRUNC_DIV_EXPR:
12180 /* Optimize (X & (-A)) / A where A is a power of 2,
12181 to X >> log2(A).  */
12182 if (TREE_CODE (arg0) == BIT_AND_EXPR
12183 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12184 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12186 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12187 arg1, TREE_OPERAND (arg0, 1));
12188 if (sum && integer_zerop (sum)) {
12189 unsigned long pow2;
12191 if (TREE_INT_CST_LOW (arg1))
12192 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12193 else
12194 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12195 + HOST_BITS_PER_WIDE_INT;
12197 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12198 TREE_OPERAND (arg0, 0),
12199 build_int_cst (integer_type_node, pow2));
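/* For illustration: (X & -8) / 8 becomes X >> 3; the mask already
   forces the low three bits to zero, so the truncating division is
   exact and agrees with the arithmetic shift even for negative X.  */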
12203 /* Fall through */
12205 case FLOOR_DIV_EXPR:
12206 /* Simplify A / (B << N) where A and B are positive and B is
12207 a power of 2, to A >> (N + log2(B)). */
12208 strict_overflow_p = false;
12209 if (TREE_CODE (arg1) == LSHIFT_EXPR
12210 && (TYPE_UNSIGNED (type)
12211 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12213 tree sval = TREE_OPERAND (arg1, 0);
12214 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12216 tree sh_cnt = TREE_OPERAND (arg1, 1);
12217 unsigned long pow2;
12219 if (TREE_INT_CST_LOW (sval))
12220 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12221 else
12222 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12223 + HOST_BITS_PER_WIDE_INT;
12225 if (strict_overflow_p)
12226 fold_overflow_warning (("assuming signed overflow does not "
12227 "occur when simplifying A / (B << N)"),
12228 WARN_STRICT_OVERFLOW_MISC);
12230 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12231 sh_cnt,
12232 build_int_cst (TREE_TYPE (sh_cnt),
12233 pow2));
12234 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12235 fold_convert_loc (loc, type, arg0), sh_cnt);
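/* E.g. A / (4 << N) becomes A >> (N + 2) when A is known
   nonnegative; for negative A a right shift would round toward
   minus infinity rather than toward zero, hence the
   nonnegativity check above.  */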
12239 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12240 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12241 if (INTEGRAL_TYPE_P (type)
12242 && TYPE_UNSIGNED (type)
12243 && code == FLOOR_DIV_EXPR)
12244 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12246 /* Fall through */
12248 case ROUND_DIV_EXPR:
12249 case CEIL_DIV_EXPR:
12250 case EXACT_DIV_EXPR:
12251 if (integer_onep (arg1))
12252 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12253 if (integer_zerop (arg1))
12254 return NULL_TREE;
12255 /* X / -1 is -X. */
12256 if (!TYPE_UNSIGNED (type)
12257 && TREE_CODE (arg1) == INTEGER_CST
12258 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12259 && TREE_INT_CST_HIGH (arg1) == -1)
12260 return fold_convert_loc (loc, type, negate_expr (arg0));
12262 /* Convert -A / -B to A / B when the type is signed and overflow is
12263 undefined. */
12264 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12265 && TREE_CODE (arg0) == NEGATE_EXPR
12266 && negate_expr_p (arg1))
12268 if (INTEGRAL_TYPE_P (type))
12269 fold_overflow_warning (("assuming signed overflow does not occur "
12270 "when distributing negation across "
12271 "division"),
12272 WARN_STRICT_OVERFLOW_MISC);
12273 return fold_build2_loc (loc, code, type,
12274 fold_convert_loc (loc, type,
12275 TREE_OPERAND (arg0, 0)),
12276 fold_convert_loc (loc, type,
12277 negate_expr (arg1)));
12279 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12280 && TREE_CODE (arg1) == NEGATE_EXPR
12281 && negate_expr_p (arg0))
12283 if (INTEGRAL_TYPE_P (type))
12284 fold_overflow_warning (("assuming signed overflow does not occur "
12285 "when distributing negation across "
12286 "division"),
12287 WARN_STRICT_OVERFLOW_MISC);
12288 return fold_build2_loc (loc, code, type,
12289 fold_convert_loc (loc, type,
12290 negate_expr (arg0)),
12291 fold_convert_loc (loc, type,
12292 TREE_OPERAND (arg1, 0)));
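/* Illustrative sketch: with signed overflow treated as undefined,

     -a / -b

   folds to "a / b"; fold_overflow_warning reports the assumption,
   since the rewrite is only safe when overflow cannot happen.  */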
12295 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12296 operation, EXACT_DIV_EXPR.
12298 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12299 At one time others generated faster code, but it's not clear whether
12300 they still do after the last round of changes to the DIV code in expmed.c. */
12301 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12302 && multiple_of_p (type, arg0, arg1))
12303 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12305 strict_overflow_p = false;
12306 if (TREE_CODE (arg1) == INTEGER_CST
12307 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12308 &strict_overflow_p)))
12310 if (strict_overflow_p)
12311 fold_overflow_warning (("assuming signed overflow does not occur "
12312 "when simplifying division"),
12313 WARN_STRICT_OVERFLOW_MISC);
12314 return fold_convert_loc (loc, type, tem);
12317 return NULL_TREE;
12319 case CEIL_MOD_EXPR:
12320 case FLOOR_MOD_EXPR:
12321 case ROUND_MOD_EXPR:
12322 case TRUNC_MOD_EXPR:
12323 /* X % 1 is always zero, but be sure to preserve any side
12324 effects in X. */
12325 if (integer_onep (arg1))
12326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12328 /* For X % 0, return X % 0 unchanged so that we can get the
12329 proper warnings and errors. */
12330 if (integer_zerop (arg1))
12331 return NULL_TREE;
12333 /* 0 % X is always zero, but be sure to preserve any side
12334 effects in X. Place this after checking for X == 0. */
12335 if (integer_zerop (arg0))
12336 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12338 /* X % -1 is zero. */
12339 if (!TYPE_UNSIGNED (type)
12340 && TREE_CODE (arg1) == INTEGER_CST
12341 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12342 && TREE_INT_CST_HIGH (arg1) == -1)
12343 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12345 /* X % -C is the same as X % C. */
12346 if (code == TRUNC_MOD_EXPR
12347 && !TYPE_UNSIGNED (type)
12348 && TREE_CODE (arg1) == INTEGER_CST
12349 && !TREE_OVERFLOW (arg1)
12350 && TREE_INT_CST_HIGH (arg1) < 0
12351 && !TYPE_OVERFLOW_TRAPS (type)
12352 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12353 && !sign_bit_p (arg1, arg1))
12354 return fold_build2_loc (loc, code, type,
12355 fold_convert_loc (loc, type, arg0),
12356 fold_convert_loc (loc, type,
12357 negate_expr (arg1)));
12359 /* X % -Y is the same as X % Y. */
12360 if (code == TRUNC_MOD_EXPR
12361 && !TYPE_UNSIGNED (type)
12362 && TREE_CODE (arg1) == NEGATE_EXPR
12363 && !TYPE_OVERFLOW_TRAPS (type))
12364 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12365 fold_convert_loc (loc, type,
12366 TREE_OPERAND (arg1, 0)));
12368 strict_overflow_p = false;
12369 if (TREE_CODE (arg1) == INTEGER_CST
12370 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12371 &strict_overflow_p)))
12373 if (strict_overflow_p)
12374 fold_overflow_warning (("assuming signed overflow does not occur "
12375 "when simplifying modulus"),
12376 WARN_STRICT_OVERFLOW_MISC);
12377 return fold_convert_loc (loc, type, tem);
12380 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12381 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12382 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12383 && (TYPE_UNSIGNED (type)
12384 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12386 tree c = arg1;
12387 /* Also optimize A % (C << N) where C is a power of 2,
12388 to A & ((C << N) - 1). */
12389 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12390 c = TREE_OPERAND (arg1, 0);
12392 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12394 tree mask
12395 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12396 build_int_cst (TREE_TYPE (arg1), 1));
12397 if (strict_overflow_p)
12398 fold_overflow_warning (("assuming signed overflow does not "
12399 "occur when simplifying "
12400 "X % (power of two)"),
12401 WARN_STRICT_OVERFLOW_MISC);
12402 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12403 fold_convert_loc (loc, type, arg0),
12404 fold_convert_loc (loc, type, mask));
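/* Illustrative sketch: for unsigned (or provably nonnegative) x,

     x % 16         folds to  x & 15
     x % (4u << n)  folds to  x & ((4u << n) - 1)

   replacing the modulus with a simple mask.  */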
12408 return NULL_TREE;
12410 case LROTATE_EXPR:
12411 case RROTATE_EXPR:
12412 if (integer_all_onesp (arg0))
12413 return omit_one_operand_loc (loc, type, arg0, arg1);
12414 goto shift;
12416 case RSHIFT_EXPR:
12417 /* Optimize -1 >> x for arithmetic right shifts. */
12418 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12419 && tree_expr_nonnegative_p (arg1))
12420 return omit_one_operand_loc (loc, type, arg0, arg1);
12421 /* ... fall through ... */
12423 case LSHIFT_EXPR:
12424 shift:
12425 if (integer_zerop (arg1))
12426 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12427 if (integer_zerop (arg0))
12428 return omit_one_operand_loc (loc, type, arg0, arg1);
12430 /* Prefer vector1 << scalar to vector1 << vector2
12431 if vector2 is uniform. */
12432 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12433 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12434 return fold_build2_loc (loc, code, type, op0, tem);
12436 /* Since a negative shift count is not well-defined,
12437 don't try to compute it in the compiler. */
12438 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12439 return NULL_TREE;
12441 prec = element_precision (type);
12443 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12444 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12445 && TREE_INT_CST_LOW (arg1) < prec
12446 && host_integerp (TREE_OPERAND (arg0, 1), true)
12447 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12449 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12450 + TREE_INT_CST_LOW (arg1));
12452 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12453 being well defined. */
12454 if (low >= prec)
12456 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12457 low = low % prec;
12458 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12459 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12460 TREE_OPERAND (arg0, 0));
12461 else
12462 low = prec - 1;
12465 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12466 build_int_cst (TREE_TYPE (arg1), low));
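/* Illustrative sketch:

     (a << 3) << 4

   folds to "a << 7".  If the combined count reaches the precision,
   the result is 0 for left or unsigned shifts, the count is taken
   modulo the precision for rotates, and it is clamped to prec - 1
   for signed right shifts.  */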
12469 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12470 into x & ((unsigned)-1 >> c) for unsigned types. */
12471 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12472 || (TYPE_UNSIGNED (type)
12473 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12474 && host_integerp (arg1, false)
12475 && TREE_INT_CST_LOW (arg1) < prec
12476 && host_integerp (TREE_OPERAND (arg0, 1), false)
12477 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12479 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12480 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12481 tree lshift;
12482 tree arg00;
12484 if (low0 == low1)
12486 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12488 lshift = build_minus_one_cst (type);
12489 lshift = const_binop (code, lshift, arg1);
12491 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
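/* Illustrative sketch:

     (x >> 5) << 5

   folds to "x & (-1 << 5)", i.e. the low five bits are cleared
   with a single mask instead of two shifts.  */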
12495 /* Rewrite an LROTATE_EXPR by a constant into an
12496 RROTATE_EXPR by a new constant. */
12497 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12499 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12500 tem = const_binop (MINUS_EXPR, tem, arg1);
12501 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12504 /* If we have a rotate of a bit operation with the rotate count and
12505 the second operand of the bit operation both constant,
12506 permute the two operations. */
12507 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12508 && (TREE_CODE (arg0) == BIT_AND_EXPR
12509 || TREE_CODE (arg0) == BIT_IOR_EXPR
12510 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12512 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12513 fold_build2_loc (loc, code, type,
12514 TREE_OPERAND (arg0, 0), arg1),
12515 fold_build2_loc (loc, code, type,
12516 TREE_OPERAND (arg0, 1), arg1));
12518 /* Two consecutive rotates adding up to the precision of the
12519 type can be ignored. */
12520 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12521 && TREE_CODE (arg0) == RROTATE_EXPR
12522 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12523 && TREE_INT_CST_HIGH (arg1) == 0
12524 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12525 && ((TREE_INT_CST_LOW (arg1)
12526 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12527 == prec))
12528 return TREE_OPERAND (arg0, 0);
12530 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12531 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12532 if the latter can be further optimized. */
12533 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12534 && TREE_CODE (arg0) == BIT_AND_EXPR
12535 && TREE_CODE (arg1) == INTEGER_CST
12536 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12538 tree mask = fold_build2_loc (loc, code, type,
12539 fold_convert_loc (loc, type,
12540 TREE_OPERAND (arg0, 1)),
12541 arg1);
12542 tree shift = fold_build2_loc (loc, code, type,
12543 fold_convert_loc (loc, type,
12544 TREE_OPERAND (arg0, 0)),
12545 arg1);
12546 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12547 if (tem)
12548 return tem;
12551 return NULL_TREE;
12553 case MIN_EXPR:
12554 if (operand_equal_p (arg0, arg1, 0))
12555 return omit_one_operand_loc (loc, type, arg0, arg1);
12556 if (INTEGRAL_TYPE_P (type)
12557 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12558 return omit_one_operand_loc (loc, type, arg1, arg0);
12559 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12560 if (tem)
12561 return tem;
12562 goto associate;
12564 case MAX_EXPR:
12565 if (operand_equal_p (arg0, arg1, 0))
12566 return omit_one_operand_loc (loc, type, arg0, arg1);
12567 if (INTEGRAL_TYPE_P (type)
12568 && TYPE_MAX_VALUE (type)
12569 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12570 return omit_one_operand_loc (loc, type, arg1, arg0);
12571 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12572 if (tem)
12573 return tem;
12574 goto associate;
12576 case TRUTH_ANDIF_EXPR:
12577 /* Note that the operands of this must be ints
12578 and their values must be 0 or 1.
12579 ("true" is a fixed value perhaps depending on the language.) */
12580 /* If first arg is constant zero, return it. */
12581 if (integer_zerop (arg0))
12582 return fold_convert_loc (loc, type, arg0);
12583 case TRUTH_AND_EXPR:
12584 /* If either arg is constant true, drop it. */
12585 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12587 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12588 /* Preserve sequence points. */
12589 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12591 /* If second arg is constant zero, result is zero, but first arg
12592 must be evaluated. */
12593 if (integer_zerop (arg1))
12594 return omit_one_operand_loc (loc, type, arg1, arg0);
12595 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12596 case will be handled here. */
12597 if (integer_zerop (arg0))
12598 return omit_one_operand_loc (loc, type, arg0, arg1);
12600 /* !X && X is always false. */
12601 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12603 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12604 /* X && !X is always false. */
12605 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12606 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12607 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12609 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12610 means A >= Y && A != MAX, but in this case we know that
12611 A < X <= MAX. */
12613 if (!TREE_SIDE_EFFECTS (arg0)
12614 && !TREE_SIDE_EFFECTS (arg1))
12616 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12617 if (tem && !operand_equal_p (tem, arg0, 0))
12618 return fold_build2_loc (loc, code, type, tem, arg1);
12620 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12621 if (tem && !operand_equal_p (tem, arg1, 0))
12622 return fold_build2_loc (loc, code, type, arg0, tem);
12625 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12626 != NULL_TREE)
12627 return tem;
12629 return NULL_TREE;
12631 case TRUTH_ORIF_EXPR:
12632 /* Note that the operands of this must be ints
12633 and their values must be 0 or true.
12634 ("true" is a fixed value perhaps depending on the language.) */
12635 /* If first arg is constant true, return it. */
12636 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12637 return fold_convert_loc (loc, type, arg0);
12638 case TRUTH_OR_EXPR:
12639 /* If either arg is constant zero, drop it. */
12640 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12641 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12642 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12643 /* Preserve sequence points. */
12644 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12645 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12646 /* If second arg is constant true, result is true, but we must
12647 evaluate first arg. */
12648 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12649 return omit_one_operand_loc (loc, type, arg1, arg0);
12650 /* Likewise for first arg, but note this only occurs here for
12651 TRUTH_OR_EXPR. */
12652 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12653 return omit_one_operand_loc (loc, type, arg0, arg1);
12655 /* !X || X is always true. */
12656 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12657 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12658 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12659 /* X || !X is always true. */
12660 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12661 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12662 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12664 /* (X && !Y) || (!X && Y) is X ^ Y */
12665 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12666 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12668 tree a0, a1, l0, l1, n0, n1;
12670 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12671 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12673 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12674 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12676 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12677 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12679 if ((operand_equal_p (n0, a0, 0)
12680 && operand_equal_p (n1, a1, 0))
12681 || (operand_equal_p (n0, a1, 0)
12682 && operand_equal_p (n1, a0, 0)))
12683 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
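/* Illustrative sketch:

     (x && !y) || (!x && y)

   folds to the single truth test "x ^ y"; either pairing of the
   negated operands matches.  */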
12686 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12687 != NULL_TREE)
12688 return tem;
12690 return NULL_TREE;
12692 case TRUTH_XOR_EXPR:
12693 /* If the second arg is constant zero, drop it. */
12694 if (integer_zerop (arg1))
12695 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12696 /* If the second arg is constant true, this is a logical inversion. */
12697 if (integer_onep (arg1))
12699 tem = invert_truthvalue_loc (loc, arg0);
12700 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12702 /* Identical arguments cancel to zero. */
12703 if (operand_equal_p (arg0, arg1, 0))
12704 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12706 /* !X ^ X is always true. */
12707 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12708 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12709 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12711 /* X ^ !X is always true. */
12712 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12713 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12714 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12716 return NULL_TREE;
12718 case EQ_EXPR:
12719 case NE_EXPR:
12720 STRIP_NOPS (arg0);
12721 STRIP_NOPS (arg1);
12723 tem = fold_comparison (loc, code, type, op0, op1);
12724 if (tem != NULL_TREE)
12725 return tem;
12727 /* bool_var != 0 becomes bool_var. */
12728 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12729 && code == NE_EXPR)
12730 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12732 /* bool_var == 1 becomes bool_var. */
12733 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12734 && code == EQ_EXPR)
12735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12737 /* bool_var != 1 becomes !bool_var. */
12738 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12739 && code == NE_EXPR)
12740 return fold_convert_loc (loc, type,
12741 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12742 TREE_TYPE (arg0), arg0));
12744 /* bool_var == 0 becomes !bool_var. */
12745 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12746 && code == EQ_EXPR)
12747 return fold_convert_loc (loc, type,
12748 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12749 TREE_TYPE (arg0), arg0));
12751 /* !exp != 0 becomes !exp */
12752 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12753 && code == NE_EXPR)
12754 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12756 /* If this is an equality comparison of the address of two non-weak,
12757 unaliased symbols neither of which are extern (since we do not
12758 have access to attributes for externs), then we know the result. */
12759 if (TREE_CODE (arg0) == ADDR_EXPR
12760 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12761 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12762 && ! lookup_attribute ("alias",
12763 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12764 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12765 && TREE_CODE (arg1) == ADDR_EXPR
12766 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12767 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12768 && ! lookup_attribute ("alias",
12769 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12770 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12772 /* We know that we're looking at the address of two
12773 non-weak, unaliased, static _DECL nodes.
12775 It is both wasteful and incorrect to call operand_equal_p
12776 to compare the two ADDR_EXPR nodes. It is wasteful in that
12777 all we need to do is test pointer equality for the arguments
12778 to the two ADDR_EXPR nodes. It is incorrect to use
12779 operand_equal_p as that function is NOT equivalent to a
12780 C equality test. It can in fact return false for two
12781 objects which would test as equal using the C equality
12782 operator. */
12783 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12784 return constant_boolean_node (equal
12785 ? code == EQ_EXPR : code != EQ_EXPR,
12786 type);
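/* Illustrative sketch: given non-weak, unaliased statics

     static int a, b;

   "&a == &b" folds to 0 and "&a == &a" to 1, by comparing the
   _DECL nodes for pointer identity.  */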
12789 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12790 a MINUS_EXPR of a constant, we can convert it into a comparison with
12791 a revised constant as long as no overflow occurs. */
12792 if (TREE_CODE (arg1) == INTEGER_CST
12793 && (TREE_CODE (arg0) == PLUS_EXPR
12794 || TREE_CODE (arg0) == MINUS_EXPR)
12795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12796 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12797 ? MINUS_EXPR : PLUS_EXPR,
12798 fold_convert_loc (loc, TREE_TYPE (arg0),
12799 arg1),
12800 TREE_OPERAND (arg0, 1)))
12801 && !TREE_OVERFLOW (tem))
12802 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
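/* Illustrative sketch:

     x + 10 == 15

   folds to "x == 5", provided computing 15 - 10 in the operand's
   type does not overflow.  */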
12804 /* Similarly for a NEGATE_EXPR. */
12805 if (TREE_CODE (arg0) == NEGATE_EXPR
12806 && TREE_CODE (arg1) == INTEGER_CST
12807 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12808 arg1)))
12809 && TREE_CODE (tem) == INTEGER_CST
12810 && !TREE_OVERFLOW (tem))
12811 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12813 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12814 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12815 && TREE_CODE (arg1) == INTEGER_CST
12816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12817 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12818 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12819 fold_convert_loc (loc,
12820 TREE_TYPE (arg0),
12821 arg1),
12822 TREE_OPERAND (arg0, 1)));
12824 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12825 if ((TREE_CODE (arg0) == PLUS_EXPR
12826 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12827 || TREE_CODE (arg0) == MINUS_EXPR)
12828 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12829 0)),
12830 arg1, 0)
12831 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12832 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12834 tree val = TREE_OPERAND (arg0, 1);
12835 return omit_two_operands_loc (loc, type,
12836 fold_build2_loc (loc, code, type,
12837 val,
12838 build_int_cst (TREE_TYPE (val),
12839 0)),
12840 TREE_OPERAND (arg0, 0), arg1);
12843 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12844 if (TREE_CODE (arg0) == MINUS_EXPR
12845 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12846 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12847 1)),
12848 arg1, 0)
12849 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12851 return omit_two_operands_loc (loc, type,
12852 code == NE_EXPR
12853 ? boolean_true_node : boolean_false_node,
12854 TREE_OPERAND (arg0, 1), arg1);
12857 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12858 for !=. Don't do this for ordered comparisons due to overflow. */
12859 if (TREE_CODE (arg0) == MINUS_EXPR
12860 && integer_zerop (arg1))
12861 return fold_build2_loc (loc, code, type,
12862 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12864 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12865 if (TREE_CODE (arg0) == ABS_EXPR
12866 && (integer_zerop (arg1) || real_zerop (arg1)))
12867 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12869 /* If this is an EQ or NE comparison with zero and ARG0 is
12870 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12871 two operations, but the latter can be done in one less insn
12872 on machines that have only two-operand insns or on which a
12873 constant cannot be the first operand. */
12874 if (TREE_CODE (arg0) == BIT_AND_EXPR
12875 && integer_zerop (arg1))
12877 tree arg00 = TREE_OPERAND (arg0, 0);
12878 tree arg01 = TREE_OPERAND (arg0, 1);
12879 if (TREE_CODE (arg00) == LSHIFT_EXPR
12880 && integer_onep (TREE_OPERAND (arg00, 0)))
12882 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12883 arg01, TREE_OPERAND (arg00, 1));
12884 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12885 build_int_cst (TREE_TYPE (arg0), 1));
12886 return fold_build2_loc (loc, code, type,
12887 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12888 arg1);
12890 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12891 && integer_onep (TREE_OPERAND (arg01, 0)))
12893 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12894 arg00, TREE_OPERAND (arg01, 1));
12895 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12896 build_int_cst (TREE_TYPE (arg0), 1));
12897 return fold_build2_loc (loc, code, type,
12898 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12899 arg1);
12903 /* If this is an NE or EQ comparison of zero against the result of a
12904 signed MOD operation whose second operand is a power of 2, make
12905 the MOD operation unsigned since it is simpler and equivalent. */
12906 if (integer_zerop (arg1)
12907 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12908 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12909 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12910 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12911 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12912 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12914 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12915 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12916 fold_convert_loc (loc, newtype,
12917 TREE_OPERAND (arg0, 0)),
12918 fold_convert_loc (loc, newtype,
12919 TREE_OPERAND (arg0, 1)));
12921 return fold_build2_loc (loc, code, type, newmod,
12922 fold_convert_loc (loc, newtype, arg1));
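/* Illustrative sketch: for signed x,

     x % 4 == 0

   becomes "(unsigned) x % 4u == 0", which can be implemented as a
   plain mask test without the sign fixups a signed modulus needs.  */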
12925 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12926 C1 is a valid shift constant, and C2 is a power of two, i.e.
12927 a single bit. */
12928 if (TREE_CODE (arg0) == BIT_AND_EXPR
12929 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12930 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12931 == INTEGER_CST
12932 && integer_pow2p (TREE_OPERAND (arg0, 1))
12933 && integer_zerop (arg1))
12935 tree itype = TREE_TYPE (arg0);
12936 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12937 prec = TYPE_PRECISION (itype);
12939 /* Check for a valid shift count. */
12940 if (TREE_INT_CST_HIGH (arg001) == 0
12941 && TREE_INT_CST_LOW (arg001) < prec)
12943 tree arg01 = TREE_OPERAND (arg0, 1);
12944 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12945 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12946 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12947 can be rewritten as (X & (C2 << C1)) != 0. */
12948 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12950 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12951 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12952 return fold_build2_loc (loc, code, type, tem,
12953 fold_convert_loc (loc, itype, arg1));
12955 /* Otherwise, for signed (arithmetic) shifts,
12956 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12957 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12958 else if (!TYPE_UNSIGNED (itype))
12959 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12960 arg000, build_int_cst (itype, 0));
12961 /* Otherwise, for unsigned (logical) shifts,
12962 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12963 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12964 else
12965 return omit_one_operand_loc (loc, type,
12966 code == EQ_EXPR ? integer_one_node
12967 : integer_zero_node,
12968 arg000);
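/* Illustrative sketch:

     ((x >> 3) & 1) != 0

   folds to "(x & 8) != 0", because 1 << 3 does not overflow; when
   the tested bit would be shifted in from the sign position, the
   whole test collapses to "x < 0" (or "x >= 0" for ==).  */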
12972 /* If we have (A & C) == C where C is a power of 2, convert this into
12973 (A & C) != 0. Similarly for NE_EXPR. */
12974 if (TREE_CODE (arg0) == BIT_AND_EXPR
12975 && integer_pow2p (TREE_OPERAND (arg0, 1))
12976 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12977 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12978 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12979 integer_zero_node));
12981 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12982 bit, then fold the expression into A < 0 or A >= 0. */
12983 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12984 if (tem)
12985 return tem;
12987 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12988 Similarly for NE_EXPR. */
12989 if (TREE_CODE (arg0) == BIT_AND_EXPR
12990 && TREE_CODE (arg1) == INTEGER_CST
12991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12993 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12994 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12995 TREE_OPERAND (arg0, 1));
12996 tree dandnotc
12997 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12998 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12999 notc);
13000 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13001 if (integer_nonzerop (dandnotc))
13002 return omit_one_operand_loc (loc, type, rslt, arg0);
13005 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13006 Similarly for NE_EXPR. */
13007 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13008 && TREE_CODE (arg1) == INTEGER_CST
13009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13011 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13012 tree candnotd
13013 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13014 TREE_OPERAND (arg0, 1),
13015 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13016 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13017 if (integer_nonzerop (candnotd))
13018 return omit_one_operand_loc (loc, type, rslt, arg0);
13021 /* If this is a comparison of a field, we may be able to simplify it. */
13022 if ((TREE_CODE (arg0) == COMPONENT_REF
13023 || TREE_CODE (arg0) == BIT_FIELD_REF)
13024 /* Handle the constant case even without -O
13025 to make sure the warnings are given. */
13026 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13028 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13029 if (t1)
13030 return t1;
13033 /* Optimize comparisons of strlen vs zero to a compare of the
13034 first character of the string vs zero. To wit,
13035 strlen(ptr) == 0 => *ptr == 0
13036 strlen(ptr) != 0 => *ptr != 0
13037 Other cases should reduce to one of these two (or a constant)
13038 due to the return value of strlen being unsigned. */
13039 if (TREE_CODE (arg0) == CALL_EXPR
13040 && integer_zerop (arg1))
13042 tree fndecl = get_callee_fndecl (arg0);
13044 if (fndecl
13045 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13046 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13047 && call_expr_nargs (arg0) == 1
13048 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13050 tree iref = build_fold_indirect_ref_loc (loc,
13051 CALL_EXPR_ARG (arg0, 0));
13052 return fold_build2_loc (loc, code, type, iref,
13053 build_int_cst (TREE_TYPE (iref), 0));
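/* Illustrative sketch:

     strlen (p) == 0

   folds to "*p == 0", eliminating the library call.  */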
13057 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13058 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13059 if (TREE_CODE (arg0) == RSHIFT_EXPR
13060 && integer_zerop (arg1)
13061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13063 tree arg00 = TREE_OPERAND (arg0, 0);
13064 tree arg01 = TREE_OPERAND (arg0, 1);
13065 tree itype = TREE_TYPE (arg00);
13066 if (TREE_INT_CST_HIGH (arg01) == 0
13067 && TREE_INT_CST_LOW (arg01)
13068 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13070 if (TYPE_UNSIGNED (itype))
13072 itype = signed_type_for (itype);
13073 arg00 = fold_convert_loc (loc, itype, arg00);
13075 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13076 type, arg00, build_zero_cst (itype));
13080 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13081 if (integer_zerop (arg1)
13082 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13083 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13084 TREE_OPERAND (arg0, 1));
13086 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13087 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13088 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13089 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13090 build_zero_cst (TREE_TYPE (arg0)));
13091 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13092 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13094 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13095 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13096 build_zero_cst (TREE_TYPE (arg0)));
13098 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13099 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13100 && TREE_CODE (arg1) == INTEGER_CST
13101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13102 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13103 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13104 TREE_OPERAND (arg0, 1), arg1));
13106 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13107 (X & C) == 0 when C is a single bit. */
13108 if (TREE_CODE (arg0) == BIT_AND_EXPR
13109 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13110 && integer_zerop (arg1)
13111 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13113 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13114 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13115 TREE_OPERAND (arg0, 1));
13116 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13117 type, tem,
13118 fold_convert_loc (loc, TREE_TYPE (arg0),
13119 arg1));
13122 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13123 constant C is a power of two, i.e. a single bit. */
13124 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13125 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13126 && integer_zerop (arg1)
13127 && integer_pow2p (TREE_OPERAND (arg0, 1))
13128 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13129 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13131 tree arg00 = TREE_OPERAND (arg0, 0);
13132 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13133 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13136 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13137 when C is a power of two, i.e. a single bit. */
13138 if (TREE_CODE (arg0) == BIT_AND_EXPR
13139 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13140 && integer_zerop (arg1)
13141 && integer_pow2p (TREE_OPERAND (arg0, 1))
13142 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13143 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13145 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13146 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13147 arg000, TREE_OPERAND (arg0, 1));
13148 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13149 tem, build_int_cst (TREE_TYPE (tem), 0));
13152 if (integer_zerop (arg1)
13153 && tree_expr_nonzero_p (arg0))
13155 tree res = constant_boolean_node (code == NE_EXPR, type);
13156 return omit_one_operand_loc (loc, type, res, arg0);
13159 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13160 if (TREE_CODE (arg0) == NEGATE_EXPR
13161 && TREE_CODE (arg1) == NEGATE_EXPR)
13162 return fold_build2_loc (loc, code, type,
13163 TREE_OPERAND (arg0, 0),
13164 fold_convert_loc (loc, TREE_TYPE (arg0),
13165 TREE_OPERAND (arg1, 0)));
13167 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13168 if (TREE_CODE (arg0) == BIT_AND_EXPR
13169 && TREE_CODE (arg1) == BIT_AND_EXPR)
13171 tree arg00 = TREE_OPERAND (arg0, 0);
13172 tree arg01 = TREE_OPERAND (arg0, 1);
13173 tree arg10 = TREE_OPERAND (arg1, 0);
13174 tree arg11 = TREE_OPERAND (arg1, 1);
13175 tree itype = TREE_TYPE (arg0);
13177 if (operand_equal_p (arg01, arg11, 0))
13178 return fold_build2_loc (loc, code, type,
13179 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13180 fold_build2_loc (loc,
13181 BIT_XOR_EXPR, itype,
13182 arg00, arg10),
13183 arg01),
13184 build_zero_cst (itype));
13186 if (operand_equal_p (arg01, arg10, 0))
13187 return fold_build2_loc (loc, code, type,
13188 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13189 fold_build2_loc (loc,
13190 BIT_XOR_EXPR, itype,
13191 arg00, arg11),
13192 arg01),
13193 build_zero_cst (itype));
13195 if (operand_equal_p (arg00, arg11, 0))
13196 return fold_build2_loc (loc, code, type,
13197 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13198 fold_build2_loc (loc,
13199 BIT_XOR_EXPR, itype,
13200 arg01, arg10),
13201 arg00),
13202 build_zero_cst (itype));
13204 if (operand_equal_p (arg00, arg10, 0))
13205 return fold_build2_loc (loc, code, type,
13206 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13207 fold_build2_loc (loc,
13208 BIT_XOR_EXPR, itype,
13209 arg01, arg11),
13210 arg00),
13211 build_zero_cst (itype));
13214 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13215 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13217 tree arg00 = TREE_OPERAND (arg0, 0);
13218 tree arg01 = TREE_OPERAND (arg0, 1);
13219 tree arg10 = TREE_OPERAND (arg1, 0);
13220 tree arg11 = TREE_OPERAND (arg1, 1);
13221 tree itype = TREE_TYPE (arg0);
13223 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13224 operand_equal_p guarantees no side-effects so we don't need
13225 to use omit_one_operand on Z. */
13226 if (operand_equal_p (arg01, arg11, 0))
13227 return fold_build2_loc (loc, code, type, arg00,
13228 fold_convert_loc (loc, TREE_TYPE (arg00),
13229 arg10));
13230 if (operand_equal_p (arg01, arg10, 0))
13231 return fold_build2_loc (loc, code, type, arg00,
13232 fold_convert_loc (loc, TREE_TYPE (arg00),
13233 arg11));
13234 if (operand_equal_p (arg00, arg11, 0))
13235 return fold_build2_loc (loc, code, type, arg01,
13236 fold_convert_loc (loc, TREE_TYPE (arg01),
13237 arg10));
13238 if (operand_equal_p (arg00, arg10, 0))
13239 return fold_build2_loc (loc, code, type, arg01,
13240 fold_convert_loc (loc, TREE_TYPE (arg01),
13241 arg11));
13243 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13244 if (TREE_CODE (arg01) == INTEGER_CST
13245 && TREE_CODE (arg11) == INTEGER_CST)
13247 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13248 fold_convert_loc (loc, itype, arg11));
13249 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13250 return fold_build2_loc (loc, code, type, tem,
13251 fold_convert_loc (loc, itype, arg10));
13255 /* Attempt to simplify equality/inequality comparisons of complex
13256 values. Only lower the comparison if the result is known or
13257 can be simplified to a single scalar comparison. */
13258 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13259 || TREE_CODE (arg0) == COMPLEX_CST)
13260 && (TREE_CODE (arg1) == COMPLEX_EXPR
13261 || TREE_CODE (arg1) == COMPLEX_CST))
13263 tree real0, imag0, real1, imag1;
13264 tree rcond, icond;
13266 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13268 real0 = TREE_OPERAND (arg0, 0);
13269 imag0 = TREE_OPERAND (arg0, 1);
13271 else
13273 real0 = TREE_REALPART (arg0);
13274 imag0 = TREE_IMAGPART (arg0);
13277 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13279 real1 = TREE_OPERAND (arg1, 0);
13280 imag1 = TREE_OPERAND (arg1, 1);
13282 else
13284 real1 = TREE_REALPART (arg1);
13285 imag1 = TREE_IMAGPART (arg1);
13288 rcond = fold_binary_loc (loc, code, type, real0, real1);
13289 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13291 if (integer_zerop (rcond))
13293 if (code == EQ_EXPR)
13294 return omit_two_operands_loc (loc, type, boolean_false_node,
13295 imag0, imag1);
13296 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13298 else
13300 if (code == NE_EXPR)
13301 return omit_two_operands_loc (loc, type, boolean_true_node,
13302 imag0, imag1);
13303 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13307 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13308 if (icond && TREE_CODE (icond) == INTEGER_CST)
13310 if (integer_zerop (icond))
13312 if (code == EQ_EXPR)
13313 return omit_two_operands_loc (loc, type, boolean_false_node,
13314 real0, real1);
13315 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13317 else
13319 if (code == NE_EXPR)
13320 return omit_two_operands_loc (loc, type, boolean_true_node,
13321 real0, real1);
13322 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13327 return NULL_TREE;
13329 case LT_EXPR:
13330 case GT_EXPR:
13331 case LE_EXPR:
13332 case GE_EXPR:
13333 tem = fold_comparison (loc, code, type, op0, op1);
13334 if (tem != NULL_TREE)
13335 return tem;
13337 /* Transform comparisons of the form X +- C CMP X. */
13338 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13339 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13340 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13341 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13342 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13343 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13345 tree arg01 = TREE_OPERAND (arg0, 1);
13346 enum tree_code code0 = TREE_CODE (arg0);
13347 int is_positive;
13349 if (TREE_CODE (arg01) == REAL_CST)
13350 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13351 else
13352 is_positive = tree_int_cst_sgn (arg01);
13354 /* (X - c) > X becomes false. */
13355 if (code == GT_EXPR
13356 && ((code0 == MINUS_EXPR && is_positive >= 0)
13357 || (code0 == PLUS_EXPR && is_positive <= 0)))
13359 if (TREE_CODE (arg01) == INTEGER_CST
13360 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13361 fold_overflow_warning (("assuming signed overflow does not "
13362 "occur when assuming that (X - c) > X "
13363 "is always false"),
13364 WARN_STRICT_OVERFLOW_ALL);
13365 return constant_boolean_node (0, type);
13368 /* Likewise (X + c) < X becomes false. */
13369 if (code == LT_EXPR
13370 && ((code0 == PLUS_EXPR && is_positive >= 0)
13371 || (code0 == MINUS_EXPR && is_positive <= 0)))
13373 if (TREE_CODE (arg01) == INTEGER_CST
13374 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13375 fold_overflow_warning (("assuming signed overflow does not "
13376 "occur when assuming that "
13377 "(X + c) < X is always false"),
13378 WARN_STRICT_OVERFLOW_ALL);
13379 return constant_boolean_node (0, type);
13382 /* Convert (X - c) <= X to true. */
13383 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13384 && code == LE_EXPR
13385 && ((code0 == MINUS_EXPR && is_positive >= 0)
13386 || (code0 == PLUS_EXPR && is_positive <= 0)))
13388 if (TREE_CODE (arg01) == INTEGER_CST
13389 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13390 fold_overflow_warning (("assuming signed overflow does not "
13391 "occur when assuming that "
13392 "(X - c) <= X is always true"),
13393 WARN_STRICT_OVERFLOW_ALL);
13394 return constant_boolean_node (1, type);
13397 /* Convert (X + c) >= X to true. */
13398 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13399 && code == GE_EXPR
13400 && ((code0 == PLUS_EXPR && is_positive >= 0)
13401 || (code0 == MINUS_EXPR && is_positive <= 0)))
13403 if (TREE_CODE (arg01) == INTEGER_CST
13404 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13405 fold_overflow_warning (("assuming signed overflow does not "
13406 "occur when assuming that "
13407 "(X + c) >= X is always true"),
13408 WARN_STRICT_OVERFLOW_ALL);
13409 return constant_boolean_node (1, type);
13412 if (TREE_CODE (arg01) == INTEGER_CST)
13414 /* Convert X + c > X and X - c < X to true for integers. */
13415 if (code == GT_EXPR
13416 && ((code0 == PLUS_EXPR && is_positive > 0)
13417 || (code0 == MINUS_EXPR && is_positive < 0)))
13419 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13420 fold_overflow_warning (("assuming signed overflow does "
13421 "not occur when assuming that "
13422 "(X + c) > X is always true"),
13423 WARN_STRICT_OVERFLOW_ALL);
13424 return constant_boolean_node (1, type);
13427 if (code == LT_EXPR
13428 && ((code0 == MINUS_EXPR && is_positive > 0)
13429 || (code0 == PLUS_EXPR && is_positive < 0)))
13431 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13432 fold_overflow_warning (("assuming signed overflow does "
13433 "not occur when assuming that "
13434 "(X - c) < X is always true"),
13435 WARN_STRICT_OVERFLOW_ALL);
13436 return constant_boolean_node (1, type);
13439 /* Convert X + c <= X and X - c >= X to false for integers. */
13440 if (code == LE_EXPR
13441 && ((code0 == PLUS_EXPR && is_positive > 0)
13442 || (code0 == MINUS_EXPR && is_positive < 0)))
13444 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13445 fold_overflow_warning (("assuming signed overflow does "
13446 "not occur when assuming that "
13447 "(X + c) <= X is always false"),
13448 WARN_STRICT_OVERFLOW_ALL);
13449 return constant_boolean_node (0, type);
13452 if (code == GE_EXPR
13453 && ((code0 == MINUS_EXPR && is_positive > 0)
13454 || (code0 == PLUS_EXPR && is_positive < 0)))
13456 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13457 fold_overflow_warning (("assuming signed overflow does "
13458 "not occur when assuming that "
13459 "(X - c) >= X is always false"),
13460 WARN_STRICT_OVERFLOW_ALL);
13461 return constant_boolean_node (0, type);
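/* Illustrative sketch: with signed overflow assumed undefined,

     x + 1 > x    folds to 1
     x - 1 >= x   folds to 0

   each accompanied by a strict-overflow warning, since the rewrite
   relies on the no-overflow assumption.  */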
13466 /* Comparisons with the highest or lowest possible integer of
13467 the specified precision will have known values. */
13469 tree arg1_type = TREE_TYPE (arg1);
13470 unsigned int width = TYPE_PRECISION (arg1_type);
13472 if (TREE_CODE (arg1) == INTEGER_CST
13473 && width <= HOST_BITS_PER_DOUBLE_INT
13474 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13476 HOST_WIDE_INT signed_max_hi;
13477 unsigned HOST_WIDE_INT signed_max_lo;
13478 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13480 if (width <= HOST_BITS_PER_WIDE_INT)
13482 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13483 - 1;
13484 signed_max_hi = 0;
13485 max_hi = 0;
13487 if (TYPE_UNSIGNED (arg1_type))
13489 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13490 min_lo = 0;
13491 min_hi = 0;
13493 else
13495 max_lo = signed_max_lo;
13496 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13497 min_hi = -1;
13500 else
13502 width -= HOST_BITS_PER_WIDE_INT;
13503 signed_max_lo = -1;
13504 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13505 - 1;
13506 max_lo = -1;
13507 min_lo = 0;
13509 if (TYPE_UNSIGNED (arg1_type))
13511 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13512 min_hi = 0;
13514 else
13516 max_hi = signed_max_hi;
13517 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13521 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13522 && TREE_INT_CST_LOW (arg1) == max_lo)
13523 switch (code)
13525 case GT_EXPR:
13526 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13528 case GE_EXPR:
13529 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13531 case LE_EXPR:
13532 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13534 case LT_EXPR:
13535 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13537 /* The GE_EXPR and LT_EXPR cases above are not normally
13538 reached because of previous transformations. */
13540 default:
13541 break;
13543 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13544 == max_hi
13545 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13546 switch (code)
13548 case GT_EXPR:
13549 arg1 = const_binop (PLUS_EXPR, arg1,
13550 build_int_cst (TREE_TYPE (arg1), 1));
13551 return fold_build2_loc (loc, EQ_EXPR, type,
13552 fold_convert_loc (loc,
13553 TREE_TYPE (arg1), arg0),
13554 arg1);
13555 case LE_EXPR:
13556 arg1 = const_binop (PLUS_EXPR, arg1,
13557 build_int_cst (TREE_TYPE (arg1), 1));
13558 return fold_build2_loc (loc, NE_EXPR, type,
13559 fold_convert_loc (loc, TREE_TYPE (arg1),
13560 arg0),
13561 arg1);
13562 default:
13563 break;
13565 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13566 == min_hi
13567 && TREE_INT_CST_LOW (arg1) == min_lo)
13568 switch (code)
13570 case LT_EXPR:
13571 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13573 case LE_EXPR:
13574 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13576 case GE_EXPR:
13577 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13579 case GT_EXPR:
13580 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13582 default:
13583 break;
13585 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13586 == min_hi
13587 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13588 switch (code)
13590 case GE_EXPR:
13591 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13592 return fold_build2_loc (loc, NE_EXPR, type,
13593 fold_convert_loc (loc,
13594 TREE_TYPE (arg1), arg0),
13595 arg1);
13596 case LT_EXPR:
13597 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13598 return fold_build2_loc (loc, EQ_EXPR, type,
13599 fold_convert_loc (loc, TREE_TYPE (arg1),
13600 arg0),
13601 arg1);
13602 default:
13603 break;
13606 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13607 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13608 && TYPE_UNSIGNED (arg1_type)
13609 /* We will flip the signedness of the comparison operator
13610 associated with the mode of arg1, so the sign bit is
13611 specified by this mode. Check that arg1 is the signed
13612 max associated with this sign bit. */
13613 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13614 /* signed_type does not work on pointer types. */
13615 && INTEGRAL_TYPE_P (arg1_type))
13617 /* The following case also applies to X < signed_max+1
13618 and X >= signed_max+1 because of previous transformations. */
13619 if (code == LE_EXPR || code == GT_EXPR)
13621 tree st;
13622 st = signed_type_for (TREE_TYPE (arg1));
13623 return fold_build2_loc (loc,
13624 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13625 type, fold_convert_loc (loc, st, arg0),
13626 build_int_cst (st, 0));
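/* Illustrative sketch: for 32-bit unsigned x,

     x > 0x7fffffffu

   folds to "(int) x < 0", turning the comparison against the
   signed maximum into a sign-bit test.  */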
13632 /* If we are comparing an ABS_EXPR with a constant, we can
13633 convert all the cases into explicit comparisons, but they may
13634 well not be faster than doing the ABS and one comparison.
13635 But ABS (X) <= C is a range comparison, which becomes a subtraction
13636 and a comparison, and is probably faster. */
13637 if (code == LE_EXPR
13638 && TREE_CODE (arg1) == INTEGER_CST
13639 && TREE_CODE (arg0) == ABS_EXPR
13640 && ! TREE_SIDE_EFFECTS (arg0)
13641 && (0 != (tem = negate_expr (arg1)))
13642 && TREE_CODE (tem) == INTEGER_CST
13643 && !TREE_OVERFLOW (tem))
13644 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13645 build2 (GE_EXPR, type,
13646 TREE_OPERAND (arg0, 0), tem),
13647 build2 (LE_EXPR, type,
13648 TREE_OPERAND (arg0, 0), arg1));
13650 /* Convert ABS_EXPR<x> >= 0 to true. */
13651 strict_overflow_p = false;
13652 if (code == GE_EXPR
13653 && (integer_zerop (arg1)
13654 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13655 && real_zerop (arg1)))
13656 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13658 if (strict_overflow_p)
13659 fold_overflow_warning (("assuming signed overflow does not occur "
13660 "when simplifying comparison of "
13661 "absolute value and zero"),
13662 WARN_STRICT_OVERFLOW_CONDITIONAL);
13663 return omit_one_operand_loc (loc, type,
13664 constant_boolean_node (true, type),
13665 arg0);
13668 /* Convert ABS_EXPR<x> < 0 to false. */
13669 strict_overflow_p = false;
13670 if (code == LT_EXPR
13671 && (integer_zerop (arg1) || real_zerop (arg1))
13672 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13674 if (strict_overflow_p)
13675 fold_overflow_warning (("assuming signed overflow does not occur "
13676 "when simplifying comparison of "
13677 "absolute value and zero"),
13678 WARN_STRICT_OVERFLOW_CONDITIONAL);
13679 return omit_one_operand_loc (loc, type,
13680 constant_boolean_node (false, type),
13681 arg0);
13684 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13685 and similarly for >= into !=. */
13686 if ((code == LT_EXPR || code == GE_EXPR)
13687 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13688 && TREE_CODE (arg1) == LSHIFT_EXPR
13689 && integer_onep (TREE_OPERAND (arg1, 0)))
13690 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13691 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13692 TREE_OPERAND (arg1, 1)),
13693 build_zero_cst (TREE_TYPE (arg0)));
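/* Illustrative sketch: for unsigned x,

     x < (1u << y)

   folds to "(x >> y) == 0", and "x >= (1u << y)" to
   "(x >> y) != 0".  */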
13695 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13696 otherwise Y might be >= # of bits in X's type and thus e.g.
13697 (unsigned char) (1 << Y) for Y == 15 might be 0.
13698 If the cast is widening, then 1 << Y should have unsigned type,
13699 otherwise if Y is number of bits in the signed shift type minus 1,
13700 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13701 Y == 31 might be 0xffffffff80000000. */
13702 if ((code == LT_EXPR || code == GE_EXPR)
13703 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13704 && CONVERT_EXPR_P (arg1)
13705 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13706 && (TYPE_PRECISION (TREE_TYPE (arg1))
13707 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13708 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13709 || (TYPE_PRECISION (TREE_TYPE (arg1))
13710 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13711 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13713 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13714 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13715 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13716 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13717 build_zero_cst (TREE_TYPE (arg0)));
13720 return NULL_TREE;
13722 case UNORDERED_EXPR:
13723 case ORDERED_EXPR:
13724 case UNLT_EXPR:
13725 case UNLE_EXPR:
13726 case UNGT_EXPR:
13727 case UNGE_EXPR:
13728 case UNEQ_EXPR:
13729 case LTGT_EXPR:
13730 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13732 t1 = fold_relational_const (code, type, arg0, arg1);
13733 if (t1 != NULL_TREE)
13734 return t1;
13737 /* If the first operand is NaN, the result is constant. */
13738 if (TREE_CODE (arg0) == REAL_CST
13739 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13740 && (code != LTGT_EXPR || ! flag_trapping_math))
13742 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13743 ? integer_zero_node
13744 : integer_one_node;
13745 return omit_one_operand_loc (loc, type, t1, arg1);
13748 /* If the second operand is NaN, the result is constant. */
13749 if (TREE_CODE (arg1) == REAL_CST
13750 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13751 && (code != LTGT_EXPR || ! flag_trapping_math))
13753 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13754 ? integer_zero_node
13755 : integer_one_node;
13756 return omit_one_operand_loc (loc, type, t1, arg0);
13759 /* Simplify unordered comparison of something with itself. */
13760 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13761 && operand_equal_p (arg0, arg1, 0))
13762 return constant_boolean_node (1, type);
13764 if (code == LTGT_EXPR
13765 && !flag_trapping_math
13766 && operand_equal_p (arg0, arg1, 0))
13767 return constant_boolean_node (0, type);
13769 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13771 tree targ0 = strip_float_extensions (arg0);
13772 tree targ1 = strip_float_extensions (arg1);
13773 tree newtype = TREE_TYPE (targ0);
13775 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13776 newtype = TREE_TYPE (targ1);
13778 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13779 return fold_build2_loc (loc, code, type,
13780 fold_convert_loc (loc, newtype, targ0),
13781 fold_convert_loc (loc, newtype, targ1));
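/* Illustrative sketch: for floats a and b,

     (double) a < (double) b

   folds to "a < b"; widening both operands to double cannot change
   the outcome of the comparison.  */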
13784 return NULL_TREE;
13786 case COMPOUND_EXPR:
13787 /* When pedantic, a compound expression can be neither an lvalue
13788 nor an integer constant expression. */
13789 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13790 return NULL_TREE;
13791 /* Don't let (0, 0) be a null pointer constant. */
13792 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13793 : fold_convert_loc (loc, type, arg1);
13794 return pedantic_non_lvalue_loc (loc, tem);
13796 case COMPLEX_EXPR:
13797 if ((TREE_CODE (arg0) == REAL_CST
13798 && TREE_CODE (arg1) == REAL_CST)
13799 || (TREE_CODE (arg0) == INTEGER_CST
13800 && TREE_CODE (arg1) == INTEGER_CST))
13801 return build_complex (type, arg0, arg1);
13802 if (TREE_CODE (arg0) == REALPART_EXPR
13803 && TREE_CODE (arg1) == IMAGPART_EXPR
13804 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13805 && operand_equal_p (TREE_OPERAND (arg0, 0),
13806 TREE_OPERAND (arg1, 0), 0))
13807 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13808 TREE_OPERAND (arg1, 0));
13809 return NULL_TREE;
13811 case ASSERT_EXPR:
13812 /* An ASSERT_EXPR should never be passed to fold_binary. */
13813 gcc_unreachable ();
13815 case VEC_PACK_TRUNC_EXPR:
13816 case VEC_PACK_FIX_TRUNC_EXPR:
13818 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13819 tree *elts;
13821 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13822 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13823 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13824 return NULL_TREE;
13826 elts = XALLOCAVEC (tree, nelts);
13827 if (!vec_cst_ctor_to_array (arg0, elts)
13828 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13829 return NULL_TREE;
13831 for (i = 0; i < nelts; i++)
13833 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13834 ? NOP_EXPR : FIX_TRUNC_EXPR,
13835 TREE_TYPE (type), elts[i]);
13836 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13837 return NULL_TREE;
13840 return build_vector (type, elts);
13843 case VEC_WIDEN_MULT_LO_EXPR:
13844 case VEC_WIDEN_MULT_HI_EXPR:
13845 case VEC_WIDEN_MULT_EVEN_EXPR:
13846 case VEC_WIDEN_MULT_ODD_EXPR:
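/* Illustrative example with 4-element inputs and a 2-element result:
   VEC_WIDEN_MULT_EVEN_EXPR <{1,2,3,4}, {5,6,7,8}> multiplies lanes 0
   and 2 of each input, giving { 5, 21 }; the ODD variant uses lanes 1
   and 3, giving { 12, 32 }; the LO/HI variants take a contiguous
   half, selected according to BYTES_BIG_ENDIAN.  */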
13848 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13849 unsigned int out, ofs, scale;
13850 tree *elts;
13852 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13853 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13854 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13855 return NULL_TREE;
13857 elts = XALLOCAVEC (tree, nelts * 4);
13858 if (!vec_cst_ctor_to_array (arg0, elts)
13859 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13860 return NULL_TREE;
13862 if (code == VEC_WIDEN_MULT_LO_EXPR)
13863 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13864 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13865 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13866 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13867 scale = 1, ofs = 0;
13868 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13869 scale = 1, ofs = 1;
13871 for (out = 0; out < nelts; out++)
13873 unsigned int in1 = (out << scale) + ofs;
13874 unsigned int in2 = in1 + nelts * 2;
13875 tree t1, t2;
13877 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13878 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13880 if (t1 == NULL_TREE || t2 == NULL_TREE)
13881 return NULL_TREE;
13882 elts[out] = const_binop (MULT_EXPR, t1, t2);
13883 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13884 return NULL_TREE;
13887 return build_vector (type, elts);
13890 default:
13891 return NULL_TREE;
13892 } /* switch (code) */
13895 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13896 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13897 of GOTO_EXPR. */
13899 static tree
13900 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13902 switch (TREE_CODE (*tp))
13904 case LABEL_EXPR:
13905 return *tp;
13907 case GOTO_EXPR:
13908 *walk_subtrees = 0;
13910 /* ... fall through ... */
13912 default:
13913 return NULL_TREE;
13917 /* Return whether the sub-tree ST contains a label which is accessible from
13918 outside the sub-tree. */
13920 static bool
13921 contains_label_p (tree st)
13923 return
13924 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13927 /* Fold a ternary expression of code CODE and type TYPE with operands
13928 OP0, OP1, and OP2. Return the folded expression if folding is
13929 successful. Otherwise, return NULL_TREE. */
13931 tree
13932 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13933 tree op0, tree op1, tree op2)
13935 tree tem;
13936 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13937 enum tree_code_class kind = TREE_CODE_CLASS (code);
13939 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13940 && TREE_CODE_LENGTH (code) == 3);
13942 /* Strip any conversions that don't change the mode. This is safe
13943 for every expression, except for a comparison expression because
13944 its signedness is derived from its operands. So, in the latter
13945 case, only strip conversions that don't change the signedness.
13947 Note that this is done as an internal manipulation within the
13948 constant folder, in order to find the simplest representation of
13949 the arguments so that their form can be studied. In any case,
13950 the appropriate type conversions should be put back in the tree
13951 that will get out of the constant folder. */
13952 if (op0)
13954 arg0 = op0;
13955 STRIP_NOPS (arg0);
13958 if (op1)
13960 arg1 = op1;
13961 STRIP_NOPS (arg1);
13964 if (op2)
13966 arg2 = op2;
13967 STRIP_NOPS (arg2);
13970 switch (code)
13972 case COMPONENT_REF:
13973 if (TREE_CODE (arg0) == CONSTRUCTOR
13974 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13976 unsigned HOST_WIDE_INT idx;
13977 tree field, value;
13978 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13979 if (field == arg1)
13980 return value;
13982 return NULL_TREE;
13984 case COND_EXPR:
13985 case VEC_COND_EXPR:
13986 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13987 so all simple results must be passed through pedantic_non_lvalue. */
13988 if (TREE_CODE (arg0) == INTEGER_CST)
13990 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13991 tem = integer_zerop (arg0) ? op2 : op1;
13992 /* Only optimize constant conditions when the selected branch
13993 has the same type as the COND_EXPR. This avoids optimizing
13994 away "c ? x : throw", where the throw has a void type.
13995 Also avoid discarding an unused operand that contains a label. */
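/* For example, "1 ? x : ({ lab: y; })" is left alone: folding it to x
   would also discard the label LAB, which a goto outside the
   expression may still reach.  */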
13996 if ((!TREE_SIDE_EFFECTS (unused_op)
13997 || !contains_label_p (unused_op))
13998 && (! VOID_TYPE_P (TREE_TYPE (tem))
13999 || VOID_TYPE_P (type)))
14000 return pedantic_non_lvalue_loc (loc, tem);
14001 return NULL_TREE;
14003 else if (TREE_CODE (arg0) == VECTOR_CST)
14005 if (integer_all_onesp (arg0))
14006 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14007 if (integer_zerop (arg0))
14008 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14010 if ((TREE_CODE (arg1) == VECTOR_CST
14011 || TREE_CODE (arg1) == CONSTRUCTOR)
14012 && (TREE_CODE (arg2) == VECTOR_CST
14013 || TREE_CODE (arg2) == CONSTRUCTOR))
14015 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14016 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14017 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14018 for (i = 0; i < nelts; i++)
14020 tree val = VECTOR_CST_ELT (arg0, i);
14021 if (integer_all_onesp (val))
14022 sel[i] = i;
14023 else if (integer_zerop (val))
14024 sel[i] = nelts + i;
14025 else /* Currently unreachable. */
14026 return NULL_TREE;
14028 tree t = fold_vec_perm (type, arg1, arg2, sel);
14029 if (t != NULL_TREE)
14030 return t;
14034 if (operand_equal_p (arg1, op2, 0))
14035 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14037 /* If we have A op B ? A : C, we may be able to convert this to a
14038 simpler expression, depending on the operation and the values
14039 of B and C. Signed zeros prevent all of these transformations,
14040 for reasons given above each one.
14042 Also try swapping the arguments and inverting the conditional. */
14043 if (COMPARISON_CLASS_P (arg0)
14044 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14045 arg1, TREE_OPERAND (arg0, 1))
14046 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14048 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14049 if (tem)
14050 return tem;
14053 if (COMPARISON_CLASS_P (arg0)
14054 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14055 op2,
14056 TREE_OPERAND (arg0, 1))
14057 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14059 location_t loc0 = expr_location_or (arg0, loc);
14060 tem = fold_invert_truthvalue (loc0, arg0);
14061 if (tem && COMPARISON_CLASS_P (tem))
14063 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14064 if (tem)
14065 return tem;
14069 /* If the second operand is simpler than the third, swap them
14070 since that produces better jump optimization results. */
14071 if (truth_value_p (TREE_CODE (arg0))
14072 && tree_swap_operands_p (op1, op2, false))
14074 location_t loc0 = expr_location_or (arg0, loc);
14075 /* See if this can be inverted. If it can't, possibly because
14076 it was a floating-point inequality comparison, don't do
14077 anything. */
14078 tem = fold_invert_truthvalue (loc0, arg0);
14079 if (tem)
14080 return fold_build3_loc (loc, code, type, tem, op2, op1);
14083 /* Convert A ? 1 : 0 to simply A. */
14084 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14085 : (integer_onep (op1)
14086 && !VECTOR_TYPE_P (type)))
14087 && integer_zerop (op2)
14088 /* If we try to convert OP0 to our type, the
14089 call to fold will try to move the conversion inside
14090 a COND, which will recurse. In that case, the COND_EXPR
14091 is probably the best choice, so leave it alone. */
14092 && type == TREE_TYPE (arg0))
14093 return pedantic_non_lvalue_loc (loc, arg0);
14095 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14096 over COND_EXPR in cases such as floating point comparisons. */
14097 if (integer_zerop (op1)
14098 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14099 : (integer_onep (op2)
14100 && !VECTOR_TYPE_P (type)))
14101 && truth_value_p (TREE_CODE (arg0)))
14102 return pedantic_non_lvalue_loc (loc,
14103 fold_convert_loc (loc, type,
14104 invert_truthvalue_loc (loc,
14105 arg0)));
14107 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14108 if (TREE_CODE (arg0) == LT_EXPR
14109 && integer_zerop (TREE_OPERAND (arg0, 1))
14110 && integer_zerop (op2)
14111 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14113 /* sign_bit_p only checks ARG1 bits within A's precision.
14114 If <sign bit of A> has wider type than A, bits outside
14115 of A's precision in <sign bit of A> need to be checked.
14116 If they are all 0, this optimization needs to be done
14117 in unsigned A's type; if they are all 1, in signed A's type;
14118 otherwise this can't be done. */
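/* Worked example (illustrative): if A has a signed 8-bit type and
   <sign bit of A> is the 32-bit constant 0x80, bits 8..31 of the
   constant are all zero, so the AND is performed as
   ((unsigned char) A & 0x80); with the sign-extended constant
   0xffffff80, those bits are all one and the signed variant of A's
   type is used instead.  */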
14119 if (TYPE_PRECISION (TREE_TYPE (tem))
14120 < TYPE_PRECISION (TREE_TYPE (arg1))
14121 && TYPE_PRECISION (TREE_TYPE (tem))
14122 < TYPE_PRECISION (type))
14124 unsigned HOST_WIDE_INT mask_lo;
14125 HOST_WIDE_INT mask_hi;
14126 int inner_width, outer_width;
14127 tree tem_type;
14129 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14130 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14131 if (outer_width > TYPE_PRECISION (type))
14132 outer_width = TYPE_PRECISION (type);
14134 if (outer_width > HOST_BITS_PER_WIDE_INT)
14136 mask_hi = ((unsigned HOST_WIDE_INT) -1
14137 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14138 mask_lo = -1;
14140 else
14142 mask_hi = 0;
14143 mask_lo = ((unsigned HOST_WIDE_INT) -1
14144 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14146 if (inner_width > HOST_BITS_PER_WIDE_INT)
14148 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14149 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14150 mask_lo = 0;
14152 else
14153 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14154 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14156 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14157 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14159 tem_type = signed_type_for (TREE_TYPE (tem));
14160 tem = fold_convert_loc (loc, tem_type, tem);
14162 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14163 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14165 tem_type = unsigned_type_for (TREE_TYPE (tem));
14166 tem = fold_convert_loc (loc, tem_type, tem);
14168 else
14169 tem = NULL;
14172 if (tem)
14173 return
14174 fold_convert_loc (loc, type,
14175 fold_build2_loc (loc, BIT_AND_EXPR,
14176 TREE_TYPE (tem), tem,
14177 fold_convert_loc (loc,
14178 TREE_TYPE (tem),
14179 arg1)));
14182 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14183 already handled above. */
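/* E.g. "(x >> 3) & 1 ? 8 : 0" folds to "x & 8", since 8 == 1 << 3.  */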
14184 if (TREE_CODE (arg0) == BIT_AND_EXPR
14185 && integer_onep (TREE_OPERAND (arg0, 1))
14186 && integer_zerop (op2)
14187 && integer_pow2p (arg1))
14189 tree tem = TREE_OPERAND (arg0, 0);
14190 STRIP_NOPS (tem);
14191 if (TREE_CODE (tem) == RSHIFT_EXPR
14192 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14193 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14194 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14195 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14196 TREE_OPERAND (tem, 0), arg1);
14199 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14200 is probably obsolete because the first operand should be a
14201 truth value (that's why we have the two cases above), but let's
14202 leave it in until we can confirm this for all front-ends. */
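/* E.g. "(x & 4) != 0 ? 4 : 0" folds to plain "x & 4".  */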
14203 if (integer_zerop (op2)
14204 && TREE_CODE (arg0) == NE_EXPR
14205 && integer_zerop (TREE_OPERAND (arg0, 1))
14206 && integer_pow2p (arg1)
14207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14208 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14209 arg1, OEP_ONLY_CONST))
14210 return pedantic_non_lvalue_loc (loc,
14211 fold_convert_loc (loc, type,
14212 TREE_OPERAND (arg0, 0)));
14214 /* Disable the transformations below for vectors, since
14215 fold_binary_op_with_conditional_arg may undo them immediately,
14216 yielding an infinite loop. */
14217 if (code == VEC_COND_EXPR)
14218 return NULL_TREE;
14220 /* Convert A ? B : 0 into A && B if A and B are truth values. */
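/* E.g. "a ? b : 0" with boolean a and b becomes "a && b".  Note that
   given the early return for VEC_COND_EXPR just above, the
   VEC_COND_EXPR halves of the tests below are currently dead.  */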
14221 if (integer_zerop (op2)
14222 && truth_value_p (TREE_CODE (arg0))
14223 && truth_value_p (TREE_CODE (arg1))
14224 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14225 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14226 : TRUTH_ANDIF_EXPR,
14227 type, fold_convert_loc (loc, type, arg0), arg1);
14229 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14230 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14231 && truth_value_p (TREE_CODE (arg0))
14232 && truth_value_p (TREE_CODE (arg1))
14233 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14235 location_t loc0 = expr_location_or (arg0, loc);
14236 /* Only perform transformation if ARG0 is easily inverted. */
14237 tem = fold_invert_truthvalue (loc0, arg0);
14238 if (tem)
14239 return fold_build2_loc (loc, code == VEC_COND_EXPR
14240 ? BIT_IOR_EXPR
14241 : TRUTH_ORIF_EXPR,
14242 type, fold_convert_loc (loc, type, tem),
14243 arg1);
14246 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14247 if (integer_zerop (arg1)
14248 && truth_value_p (TREE_CODE (arg0))
14249 && truth_value_p (TREE_CODE (op2))
14250 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14252 location_t loc0 = expr_location_or (arg0, loc);
14253 /* Only perform transformation if ARG0 is easily inverted. */
14254 tem = fold_invert_truthvalue (loc0, arg0);
14255 if (tem)
14256 return fold_build2_loc (loc, code == VEC_COND_EXPR
14257 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14258 type, fold_convert_loc (loc, type, tem),
14259 op2);
14262 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14263 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14264 && truth_value_p (TREE_CODE (arg0))
14265 && truth_value_p (TREE_CODE (op2))
14266 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14267 return fold_build2_loc (loc, code == VEC_COND_EXPR
14268 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14269 type, fold_convert_loc (loc, type, arg0), op2);
14271 return NULL_TREE;
14273 case CALL_EXPR:
14274 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14275 of fold_ternary on them. */
14276 gcc_unreachable ();
14278 case BIT_FIELD_REF:
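/* Illustrative example with 32-bit elements:
   BIT_FIELD_REF <{1,2,3,4}, 64, 64> selects bits 64..127, i.e.
   elements 2 and 3, and folds to the two-element constant { 3, 4 };
   BIT_FIELD_REF <{1,2,3,4}, 32, 96> folds to the scalar 4.  */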
14279 if ((TREE_CODE (arg0) == VECTOR_CST
14280 || (TREE_CODE (arg0) == CONSTRUCTOR
14281 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14282 && (type == TREE_TYPE (TREE_TYPE (arg0))
14283 || (TREE_CODE (type) == VECTOR_TYPE
14284 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14286 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14287 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14288 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14289 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14291 if (n != 0
14292 && (idx % width) == 0
14293 && (n % width) == 0
14294 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14296 idx = idx / width;
14297 n = n / width;
14299 if (TREE_CODE (arg0) == VECTOR_CST)
14301 if (n == 1)
14302 return VECTOR_CST_ELT (arg0, idx);
14304 tree *vals = XALLOCAVEC (tree, n);
14305 for (unsigned i = 0; i < n; ++i)
14306 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14307 return build_vector (type, vals);
14310 /* Constructor elements can be subvectors. */
14311 unsigned HOST_WIDE_INT k = 1;
14312 if (CONSTRUCTOR_NELTS (arg0) != 0)
14314 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14315 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14316 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14319 /* We keep an exact subset of the constructor elements. */
14320 if ((idx % k) == 0 && (n % k) == 0)
14322 if (CONSTRUCTOR_NELTS (arg0) == 0)
14323 return build_constructor (type, NULL);
14324 idx /= k;
14325 n /= k;
14326 if (n == 1)
14328 if (idx < CONSTRUCTOR_NELTS (arg0))
14329 return CONSTRUCTOR_ELT (arg0, idx)->value;
14330 return build_zero_cst (type);
14333 vec<constructor_elt, va_gc> *vals;
14334 vec_alloc (vals, n);
14335 for (unsigned i = 0;
14336 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14337 ++i)
14338 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14339 CONSTRUCTOR_ELT
14340 (arg0, idx + i)->value);
14341 return build_constructor (type, vals);
14343 /* The bitfield references a single constructor element. */
14344 else if (idx + n <= (idx / k + 1) * k)
14346 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14347 return build_zero_cst (type);
14348 else if (n == k)
14349 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14350 else
14351 return fold_build3_loc (loc, code, type,
14352 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14353 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14358 /* A bit-field-ref that references the full argument can be stripped. */
14359 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14360 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14361 && integer_zerop (op2))
14362 return fold_convert_loc (loc, type, arg0);
14364 /* On constants we can use native encode/interpret to constant
14365 fold (nearly) all BIT_FIELD_REFs. */
14366 if (CONSTANT_CLASS_P (arg0)
14367 && can_native_interpret_type_p (type)
14368 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14369 /* This limitation should not be necessary; we just need to
14370 round this up to the mode size. */
14371 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14372 /* Need bit-shifting of the buffer to relax the following. */
14373 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14375 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14376 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14377 unsigned HOST_WIDE_INT clen;
14378 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14379 /* ??? We cannot tell native_encode_expr to start at an
14380 arbitrary byte, so limit ourselves to a reasonable amount
14381 of work. */
14382 if (clen <= 4096)
14384 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14385 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14386 if (len > 0
14387 && len * BITS_PER_UNIT >= bitpos + bitsize)
14389 tree v = native_interpret_expr (type,
14390 b + bitpos / BITS_PER_UNIT,
14391 bitsize / BITS_PER_UNIT);
14392 if (v)
14393 return v;
14398 return NULL_TREE;
14400 case FMA_EXPR:
14401 /* For integers we can decompose the FMA if possible. */
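/* E.g. FMA_EXPR <4, 5, 3> folds to the constant 23, and
   FMA_EXPR <a, b, 0> degenerates to the product a * b.  */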
14402 if (TREE_CODE (arg0) == INTEGER_CST
14403 && TREE_CODE (arg1) == INTEGER_CST)
14404 return fold_build2_loc (loc, PLUS_EXPR, type,
14405 const_binop (MULT_EXPR, arg0, arg1), arg2);
14406 if (integer_zerop (arg2))
14407 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14409 return fold_fma (loc, type, arg0, arg1, arg2);
14411 case VEC_PERM_EXPR:
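/* Illustrative example with 4-element vectors: in
   VEC_PERM_EXPR <A, A, { 0, 5, 2, 7 }> both inputs are the same, so
   the selector is reduced modulo 4 to { 0, 1, 2, 3 }.  That is an
   identity selection from the first input, and the whole expression
   folds to plain A.  */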
14412 if (TREE_CODE (arg2) == VECTOR_CST)
14414 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14415 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14416 tree t;
14417 bool need_mask_canon = false;
14418 bool all_in_vec0 = true;
14419 bool all_in_vec1 = true;
14420 bool maybe_identity = true;
14421 bool single_arg = (op0 == op1);
14422 bool changed = false;
14424 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14425 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14426 for (i = 0; i < nelts; i++)
14428 tree val = VECTOR_CST_ELT (arg2, i);
14429 if (TREE_CODE (val) != INTEGER_CST)
14430 return NULL_TREE;
14432 sel[i] = TREE_INT_CST_LOW (val) & mask;
14433 if (TREE_INT_CST_HIGH (val)
14434 || ((unsigned HOST_WIDE_INT)
14435 TREE_INT_CST_LOW (val) != sel[i]))
14436 need_mask_canon = true;
14438 if (sel[i] < nelts)
14439 all_in_vec1 = false;
14440 else
14441 all_in_vec0 = false;
14443 if ((sel[i] & (nelts-1)) != i)
14444 maybe_identity = false;
14447 if (maybe_identity)
14449 if (all_in_vec0)
14450 return op0;
14451 if (all_in_vec1)
14452 return op1;
14455 if (all_in_vec0)
14456 op1 = op0;
14457 else if (all_in_vec1)
14459 op0 = op1;
14460 for (i = 0; i < nelts; i++)
14461 sel[i] -= nelts;
14462 need_mask_canon = true;
14465 if ((TREE_CODE (op0) == VECTOR_CST
14466 || TREE_CODE (op0) == CONSTRUCTOR)
14467 && (TREE_CODE (op1) == VECTOR_CST
14468 || TREE_CODE (op1) == CONSTRUCTOR))
14470 t = fold_vec_perm (type, op0, op1, sel);
14471 if (t != NULL_TREE)
14472 return t;
14475 if (op0 == op1 && !single_arg)
14476 changed = true;
14478 if (need_mask_canon && arg2 == op2)
14480 tree *tsel = XALLOCAVEC (tree, nelts);
14481 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14482 for (i = 0; i < nelts; i++)
14483 tsel[i] = build_int_cst (eltype, sel[i]);
14484 op2 = build_vector (TREE_TYPE (arg2), tsel);
14485 changed = true;
14488 if (changed)
14489 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14491 return NULL_TREE;
14493 default:
14494 return NULL_TREE;
14495 } /* switch (code) */
14498 /* Perform constant folding and related simplification of EXPR.
14499 The related simplifications include x*1 => x, x*0 => 0, etc.,
14500 and application of the associative law.
14501 NOP_EXPR conversions may be removed freely (as long as we
14502 are careful not to change the type of the overall expression).
14503 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14504 but we can constant-fold them if they have constant operands. */
14506 #ifdef ENABLE_FOLD_CHECKING
14507 # define fold(x) fold_1 (x)
14508 static tree fold_1 (tree);
14509 static
14510 #endif
14511 tree
14512 fold (tree expr)
14514 const tree t = expr;
14515 enum tree_code code = TREE_CODE (t);
14516 enum tree_code_class kind = TREE_CODE_CLASS (code);
14517 tree tem;
14518 location_t loc = EXPR_LOCATION (expr);
14520 /* Return right away if a constant. */
14521 if (kind == tcc_constant)
14522 return t;
14524 /* CALL_EXPR-like objects with variable numbers of operands are
14525 treated specially. */
14526 if (kind == tcc_vl_exp)
14528 if (code == CALL_EXPR)
14530 tem = fold_call_expr (loc, expr, false);
14531 return tem ? tem : expr;
14533 return expr;
14536 if (IS_EXPR_CODE_CLASS (kind))
14538 tree type = TREE_TYPE (t);
14539 tree op0, op1, op2;
14541 switch (TREE_CODE_LENGTH (code))
14543 case 1:
14544 op0 = TREE_OPERAND (t, 0);
14545 tem = fold_unary_loc (loc, code, type, op0);
14546 return tem ? tem : expr;
14547 case 2:
14548 op0 = TREE_OPERAND (t, 0);
14549 op1 = TREE_OPERAND (t, 1);
14550 tem = fold_binary_loc (loc, code, type, op0, op1);
14551 return tem ? tem : expr;
14552 case 3:
14553 op0 = TREE_OPERAND (t, 0);
14554 op1 = TREE_OPERAND (t, 1);
14555 op2 = TREE_OPERAND (t, 2);
14556 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14557 return tem ? tem : expr;
14558 default:
14559 break;
14563 switch (code)
14565 case ARRAY_REF:
14567 tree op0 = TREE_OPERAND (t, 0);
14568 tree op1 = TREE_OPERAND (t, 1);
14570 if (TREE_CODE (op1) == INTEGER_CST
14571 && TREE_CODE (op0) == CONSTRUCTOR
14572 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14574 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14575 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14576 unsigned HOST_WIDE_INT begin = 0;
14578 /* Find a matching index by means of a binary search. */
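/* Illustrative lookup: with elements indexed 1, 3, RANGE_EXPR [5, 9],
   12 (in sorted order), a request for index 7 narrows onto the
   RANGE_EXPR entry and returns its value.  */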
14579 while (begin != end)
14581 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14582 tree index = (*elts)[middle].index;
14584 if (TREE_CODE (index) == INTEGER_CST
14585 && tree_int_cst_lt (index, op1))
14586 begin = middle + 1;
14587 else if (TREE_CODE (index) == INTEGER_CST
14588 && tree_int_cst_lt (op1, index))
14589 end = middle;
14590 else if (TREE_CODE (index) == RANGE_EXPR
14591 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14592 begin = middle + 1;
14593 else if (TREE_CODE (index) == RANGE_EXPR
14594 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14595 end = middle;
14596 else
14597 return (*elts)[middle].value;
14601 return t;
14604 /* Return a VECTOR_CST if possible. */
14605 case CONSTRUCTOR:
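/* E.g. a V4SI constructor { 1, 2 } whose elements are all constants
   folds to the VECTOR_CST { 1, 2, 0, 0 }: elements the constructor
   omits become zeros.  */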
14607 tree type = TREE_TYPE (t);
14608 if (TREE_CODE (type) != VECTOR_TYPE)
14609 return t;
14611 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14612 unsigned HOST_WIDE_INT idx, pos = 0;
14613 tree value;
14615 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14617 if (!CONSTANT_CLASS_P (value))
14618 return t;
14619 if (TREE_CODE (value) == VECTOR_CST)
14621 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14622 vec[pos++] = VECTOR_CST_ELT (value, i);
14624 else
14625 vec[pos++] = value;
14627 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14628 vec[pos] = build_zero_cst (TREE_TYPE (type));
14630 return build_vector (type, vec);
14633 case CONST_DECL:
14634 return fold (DECL_INITIAL (t));
14636 default:
14637 return t;
14638 } /* switch (code) */
14641 #ifdef ENABLE_FOLD_CHECKING
14642 #undef fold
14644 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14645 hash_table <pointer_hash <tree_node> >);
14646 static void fold_check_failed (const_tree, const_tree);
14647 void print_fold_checksum (const_tree);
14649 /* When --enable-checking=fold is given, compute a digest of EXPR
14650 before and after the actual fold call, to verify that fold did
14651 not accidentally change the original expr. */
14653 tree
14654 fold (tree expr)
14656 tree ret;
14657 struct md5_ctx ctx;
14658 unsigned char checksum_before[16], checksum_after[16];
14659 hash_table <pointer_hash <tree_node> > ht;
14661 ht.create (32);
14662 md5_init_ctx (&ctx);
14663 fold_checksum_tree (expr, &ctx, ht);
14664 md5_finish_ctx (&ctx, checksum_before);
14665 ht.empty ();
14667 ret = fold_1 (expr);
14669 md5_init_ctx (&ctx);
14670 fold_checksum_tree (expr, &ctx, ht);
14671 md5_finish_ctx (&ctx, checksum_after);
14672 ht.dispose ();
14674 if (memcmp (checksum_before, checksum_after, 16))
14675 fold_check_failed (expr, ret);
14677 return ret;
14680 void
14681 print_fold_checksum (const_tree expr)
14683 struct md5_ctx ctx;
14684 unsigned char checksum[16], cnt;
14685 hash_table <pointer_hash <tree_node> > ht;
14687 ht.create (32);
14688 md5_init_ctx (&ctx);
14689 fold_checksum_tree (expr, &ctx, ht);
14690 md5_finish_ctx (&ctx, checksum);
14691 ht.dispose ();
14692 for (cnt = 0; cnt < 16; ++cnt)
14693 fprintf (stderr, "%02x", checksum[cnt]);
14694 putc ('\n', stderr);
14697 static void
14698 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14700 internal_error ("fold check: original tree changed by fold");
14703 static void
14704 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14705 hash_table <pointer_hash <tree_node> > ht)
14707 tree_node **slot;
14708 enum tree_code code;
14709 union tree_node buf;
14710 int i, len;
14712 recursive_label:
14713 if (expr == NULL)
14714 return;
14715 slot = ht.find_slot (expr, INSERT);
14716 if (*slot != NULL)
14717 return;
14718 *slot = CONST_CAST_TREE (expr);
14719 code = TREE_CODE (expr);
14720 if (TREE_CODE_CLASS (code) == tcc_declaration
14721 && DECL_ASSEMBLER_NAME_SET_P (expr))
14723 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14724 memcpy ((char *) &buf, expr, tree_size (expr));
14725 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14726 expr = (tree) &buf;
14728 else if (TREE_CODE_CLASS (code) == tcc_type
14729 && (TYPE_POINTER_TO (expr)
14730 || TYPE_REFERENCE_TO (expr)
14731 || TYPE_CACHED_VALUES_P (expr)
14732 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14733 || TYPE_NEXT_VARIANT (expr)))
14735 /* Allow these fields to be modified. */
14736 tree tmp;
14737 memcpy ((char *) &buf, expr, tree_size (expr));
14738 expr = tmp = (tree) &buf;
14739 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14740 TYPE_POINTER_TO (tmp) = NULL;
14741 TYPE_REFERENCE_TO (tmp) = NULL;
14742 TYPE_NEXT_VARIANT (tmp) = NULL;
14743 if (TYPE_CACHED_VALUES_P (tmp))
14745 TYPE_CACHED_VALUES_P (tmp) = 0;
14746 TYPE_CACHED_VALUES (tmp) = NULL;
14749 md5_process_bytes (expr, tree_size (expr), ctx);
14750 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14751 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14752 if (TREE_CODE_CLASS (code) != tcc_type
14753 && TREE_CODE_CLASS (code) != tcc_declaration
14754 && code != TREE_LIST
14755 && code != SSA_NAME
14756 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14757 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14758 switch (TREE_CODE_CLASS (code))
14760 case tcc_constant:
14761 switch (code)
14763 case STRING_CST:
14764 md5_process_bytes (TREE_STRING_POINTER (expr),
14765 TREE_STRING_LENGTH (expr), ctx);
14766 break;
14767 case COMPLEX_CST:
14768 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14769 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14770 break;
14771 case VECTOR_CST:
14772 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14773 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14774 break;
14775 default:
14776 break;
14778 break;
14779 case tcc_exceptional:
14780 switch (code)
14782 case TREE_LIST:
14783 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14784 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14785 expr = TREE_CHAIN (expr);
14786 goto recursive_label;
14787 break;
14788 case TREE_VEC:
14789 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14790 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14791 break;
14792 default:
14793 break;
14795 break;
14796 case tcc_expression:
14797 case tcc_reference:
14798 case tcc_comparison:
14799 case tcc_unary:
14800 case tcc_binary:
14801 case tcc_statement:
14802 case tcc_vl_exp:
14803 len = TREE_OPERAND_LENGTH (expr);
14804 for (i = 0; i < len; ++i)
14805 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14806 break;
14807 case tcc_declaration:
14808 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14809 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14810 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14812 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14813 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14814 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14815 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14816 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14818 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14819 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14821 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14823 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14824 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14825 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14827 break;
14828 case tcc_type:
14829 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14830 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14831 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14832 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14833 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14834 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14835 if (INTEGRAL_TYPE_P (expr)
14836 || SCALAR_FLOAT_TYPE_P (expr))
14838 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14839 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14841 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14842 if (TREE_CODE (expr) == RECORD_TYPE
14843 || TREE_CODE (expr) == UNION_TYPE
14844 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14845 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14846 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14847 break;
14848 default:
14849 break;
14853 /* Helper function for outputting the checksum of a tree T. When
14854 debugging with gdb, you can "define mynext" to be "next" followed
14855 by "call debug_fold_checksum (op0)", then just trace down till the
14856 outputs differ. */
14858 DEBUG_FUNCTION void
14859 debug_fold_checksum (const_tree t)
14861 int i;
14862 unsigned char checksum[16];
14863 struct md5_ctx ctx;
14864 hash_table <pointer_hash <tree_node> > ht;
14865 ht.create (32);
14867 md5_init_ctx (&ctx);
14868 fold_checksum_tree (t, &ctx, ht);
14869 md5_finish_ctx (&ctx, checksum);
14870 ht.empty ();
14872 for (i = 0; i < 16; i++)
14873 fprintf (stderr, "%d ", checksum[i]);
14875 fprintf (stderr, "\n");
14878 #endif
14880 /* Fold a unary tree expression with code CODE of type TYPE with an
14881 operand OP0. LOC is the location of the resulting expression.
14882 Return a folded expression if successful. Otherwise, return a tree
14883 expression with code CODE of type TYPE with an operand OP0. */
14885 tree
14886 fold_build1_stat_loc (location_t loc,
14887 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14889 tree tem;
14890 #ifdef ENABLE_FOLD_CHECKING
14891 unsigned char checksum_before[16], checksum_after[16];
14892 struct md5_ctx ctx;
14893 hash_table <pointer_hash <tree_node> > ht;
14895 ht.create (32);
14896 md5_init_ctx (&ctx);
14897 fold_checksum_tree (op0, &ctx, ht);
14898 md5_finish_ctx (&ctx, checksum_before);
14899 ht.empty ();
14900 #endif
14902 tem = fold_unary_loc (loc, code, type, op0);
14903 if (!tem)
14904 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14906 #ifdef ENABLE_FOLD_CHECKING
14907 md5_init_ctx (&ctx);
14908 fold_checksum_tree (op0, &ctx, ht);
14909 md5_finish_ctx (&ctx, checksum_after);
14910 ht.dispose ();
14912 if (memcmp (checksum_before, checksum_after, 16))
14913 fold_check_failed (op0, tem);
14914 #endif
14915 return tem;
14918 /* Fold a binary tree expression with code CODE of type TYPE with
14919 operands OP0 and OP1. LOC is the location of the resulting
14920 expression. Return a folded expression if successful. Otherwise,
14921 return a tree expression with code CODE of type TYPE with operands
14922 OP0 and OP1. */
14924 tree
14925 fold_build2_stat_loc (location_t loc,
14926 enum tree_code code, tree type, tree op0, tree op1
14927 MEM_STAT_DECL)
14929 tree tem;
14930 #ifdef ENABLE_FOLD_CHECKING
14931 unsigned char checksum_before_op0[16],
14932 checksum_before_op1[16],
14933 checksum_after_op0[16],
14934 checksum_after_op1[16];
14935 struct md5_ctx ctx;
14936 hash_table <pointer_hash <tree_node> > ht;
14938 ht.create (32);
14939 md5_init_ctx (&ctx);
14940 fold_checksum_tree (op0, &ctx, ht);
14941 md5_finish_ctx (&ctx, checksum_before_op0);
14942 ht.empty ();
14944 md5_init_ctx (&ctx);
14945 fold_checksum_tree (op1, &ctx, ht);
14946 md5_finish_ctx (&ctx, checksum_before_op1);
14947 ht.empty ();
14948 #endif
14950 tem = fold_binary_loc (loc, code, type, op0, op1);
14951 if (!tem)
14952 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14954 #ifdef ENABLE_FOLD_CHECKING
14955 md5_init_ctx (&ctx);
14956 fold_checksum_tree (op0, &ctx, ht);
14957 md5_finish_ctx (&ctx, checksum_after_op0);
14958 ht.empty ();
14960 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14961 fold_check_failed (op0, tem);
14963 md5_init_ctx (&ctx);
14964 fold_checksum_tree (op1, &ctx, ht);
14965 md5_finish_ctx (&ctx, checksum_after_op1);
14966 ht.dispose ();
14968 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14969 fold_check_failed (op1, tem);
14970 #endif
14971 return tem;
14974 /* Fold a ternary tree expression with code CODE of type TYPE with
14975 operands OP0, OP1, and OP2. Return a folded expression if
14976 successful. Otherwise, return a tree expression with code CODE of
14977 type TYPE with operands OP0, OP1, and OP2. */
14979 tree
14980 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14981 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14983 tree tem;
14984 #ifdef ENABLE_FOLD_CHECKING
14985 unsigned char checksum_before_op0[16],
14986 checksum_before_op1[16],
14987 checksum_before_op2[16],
14988 checksum_after_op0[16],
14989 checksum_after_op1[16],
14990 checksum_after_op2[16];
14991 struct md5_ctx ctx;
14992 hash_table <pointer_hash <tree_node> > ht;
14994 ht.create (32);
14995 md5_init_ctx (&ctx);
14996 fold_checksum_tree (op0, &ctx, ht);
14997 md5_finish_ctx (&ctx, checksum_before_op0);
14998 ht.empty ();
15000 md5_init_ctx (&ctx);
15001 fold_checksum_tree (op1, &ctx, ht);
15002 md5_finish_ctx (&ctx, checksum_before_op1);
15003 ht.empty ();
15005 md5_init_ctx (&ctx);
15006 fold_checksum_tree (op2, &ctx, ht);
15007 md5_finish_ctx (&ctx, checksum_before_op2);
15008 ht.empty ();
15009 #endif
15011 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15012 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15013 if (!tem)
15014 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15016 #ifdef ENABLE_FOLD_CHECKING
15017 md5_init_ctx (&ctx);
15018 fold_checksum_tree (op0, &ctx, ht);
15019 md5_finish_ctx (&ctx, checksum_after_op0);
15020 ht.empty ();
15022 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15023 fold_check_failed (op0, tem);
15025 md5_init_ctx (&ctx);
15026 fold_checksum_tree (op1, &ctx, ht);
15027 md5_finish_ctx (&ctx, checksum_after_op1);
15028 ht.empty ();
15030 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15031 fold_check_failed (op1, tem);
15033 md5_init_ctx (&ctx);
15034 fold_checksum_tree (op2, &ctx, ht);
15035 md5_finish_ctx (&ctx, checksum_after_op2);
15036 ht.dispose ();
15038 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15039 fold_check_failed (op2, tem);
15040 #endif
15041 return tem;
15044 /* Fold a CALL_EXPR expression of type TYPE with callee FN, the NARGS
15045 arguments in ARGARRAY, and a null static chain.
15046 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15047 of type TYPE from the given operands as constructed by build_call_array. */
15049 tree
15050 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15051 int nargs, tree *argarray)
15053 tree tem;
15054 #ifdef ENABLE_FOLD_CHECKING
15055 unsigned char checksum_before_fn[16],
15056 checksum_before_arglist[16],
15057 checksum_after_fn[16],
15058 checksum_after_arglist[16];
15059 struct md5_ctx ctx;
15060 hash_table <pointer_hash <tree_node> > ht;
15061 int i;
15063 ht.create (32);
15064 md5_init_ctx (&ctx);
15065 fold_checksum_tree (fn, &ctx, ht);
15066 md5_finish_ctx (&ctx, checksum_before_fn);
15067 ht.empty ();
15069 md5_init_ctx (&ctx);
15070 for (i = 0; i < nargs; i++)
15071 fold_checksum_tree (argarray[i], &ctx, ht);
15072 md5_finish_ctx (&ctx, checksum_before_arglist);
15073 ht.empty ();
15074 #endif
15076 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15078 #ifdef ENABLE_FOLD_CHECKING
15079 md5_init_ctx (&ctx);
15080 fold_checksum_tree (fn, &ctx, ht);
15081 md5_finish_ctx (&ctx, checksum_after_fn);
15082 ht.empty ();
15084 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15085 fold_check_failed (fn, tem);
15087 md5_init_ctx (&ctx);
15088 for (i = 0; i < nargs; i++)
15089 fold_checksum_tree (argarray[i], &ctx, ht);
15090 md5_finish_ctx (&ctx, checksum_after_arglist);
15091 ht.dispose ();
15093 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15094 fold_check_failed (NULL_TREE, tem);
15095 #endif
15096 return tem;
15099 /* Perform constant folding and related simplification of initializer
15100 expression EXPR. These behave identically to "fold_buildN" but ignore
15101 potential run-time traps and exceptions that fold must preserve. */
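/* For instance, in an initializer a division like "1.0 / 0.0" may be
   folded to +Inf here even if -ftrapping-math is otherwise in effect,
   since flag_trapping_math is cleared around the fold_buildN call.  */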
15103 #define START_FOLD_INIT \
15104 int saved_signaling_nans = flag_signaling_nans;\
15105 int saved_trapping_math = flag_trapping_math;\
15106 int saved_rounding_math = flag_rounding_math;\
15107 int saved_trapv = flag_trapv;\
15108 int saved_folding_initializer = folding_initializer;\
15109 flag_signaling_nans = 0;\
15110 flag_trapping_math = 0;\
15111 flag_rounding_math = 0;\
15112 flag_trapv = 0;\
15113 folding_initializer = 1;
15115 #define END_FOLD_INIT \
15116 flag_signaling_nans = saved_signaling_nans;\
15117 flag_trapping_math = saved_trapping_math;\
15118 flag_rounding_math = saved_rounding_math;\
15119 flag_trapv = saved_trapv;\
15120 folding_initializer = saved_folding_initializer;
15122 tree
15123 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15124 tree type, tree op)
15126 tree result;
15127 START_FOLD_INIT;
15129 result = fold_build1_loc (loc, code, type, op);
15131 END_FOLD_INIT;
15132 return result;
15135 tree
15136 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15137 tree type, tree op0, tree op1)
15139 tree result;
15140 START_FOLD_INIT;
15142 result = fold_build2_loc (loc, code, type, op0, op1);
15144 END_FOLD_INIT;
15145 return result;
15148 tree
15149 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15150 tree type, tree op0, tree op1, tree op2)
15152 tree result;
15153 START_FOLD_INIT;
15155 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15157 END_FOLD_INIT;
15158 return result;
15161 tree
15162 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15163 int nargs, tree *argarray)
15165 tree result;
15166 START_FOLD_INIT;
15168 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15170 END_FOLD_INIT;
15171 return result;
15174 #undef START_FOLD_INIT
15175 #undef END_FOLD_INIT
15177 /* Determine if the first argument is a multiple of the second argument.
15178 Return 0 if it is not, or if we cannot easily determine that it is.
15180 An example of the sort of thing we care about (at this point; this routine
15181 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15182 fold cases do now) is discovering that
15184 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15186 is a multiple of
15188 SAVE_EXPR (J * 8)
15190 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15192 This code also handles discovering that
15194 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15196 is a multiple of 8 so we don't have to worry about dealing with a
15197 possible remainder.
15199 Note that we *look* inside a SAVE_EXPR only to determine how it was
15200 calculated; it is not safe for fold to do much of anything else with the
15201 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15202 at run time. For example, the latter example above *cannot* be implemented
15203 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15204 evaluation time of the original SAVE_EXPR is not necessarily the same at
15205 the time the new expression is evaluated. The only optimization of this
15206 sort that would be valid is changing
15208 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15210 divided by 8 to
15212 SAVE_EXPR (I) * SAVE_EXPR (J)
15214 (where the same SAVE_EXPR (J) is used in the original and the
15215 transformed version). */
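/* E.g. multiple_of_p (sizetype, J * 8 + 16, 8) returns 1: the
   PLUS_EXPR case requires both addends to be multiples of 8, which
   holds via the MULT_EXPR and INTEGER_CST cases respectively.  */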
15217 static int
15218 multiple_of_p (tree type, const_tree top, const_tree bottom)
15220 if (operand_equal_p (top, bottom, 0))
15221 return 1;
15223 if (TREE_CODE (type) != INTEGER_TYPE)
15224 return 0;
15226 switch (TREE_CODE (top))
15228 case BIT_AND_EXPR:
15229 /* A bitwise AND is a multiple of a power-of-two BOTTOM whenever
15230 either operand is, so it can be handled like MULT_EXPR below. */
15231 if (!integer_pow2p (bottom))
15232 return 0;
15233 /* FALLTHRU */
15235 case MULT_EXPR:
15236 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15237 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15239 case PLUS_EXPR:
15240 case MINUS_EXPR:
15241 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15242 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15244 case LSHIFT_EXPR:
15245 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15247 tree op1, t1;
15249 op1 = TREE_OPERAND (top, 1);
15250 /* const_binop may not detect overflow correctly,
15251 so check for it explicitly here. */
15252 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15253 > TREE_INT_CST_LOW (op1)
15254 && TREE_INT_CST_HIGH (op1) == 0
15255 && 0 != (t1 = fold_convert (type,
15256 const_binop (LSHIFT_EXPR,
15257 size_one_node,
15258 op1)))
15259 && !TREE_OVERFLOW (t1))
15260 return multiple_of_p (type, t1, bottom);
15262 return 0;
15264 case NOP_EXPR:
15265 /* Can't handle conversions from non-integral or wider integral type. */
15266 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15267 || (TYPE_PRECISION (type)
15268 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15269 return 0;
15271 /* ... fall through ... */
15273 case SAVE_EXPR:
15274 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15276 case COND_EXPR:
15277 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15278 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15280 case INTEGER_CST:
15281 if (TREE_CODE (bottom) != INTEGER_CST
15282 || integer_zerop (bottom)
15283 || (TYPE_UNSIGNED (type)
15284 && (tree_int_cst_sgn (top) < 0
15285 || tree_int_cst_sgn (bottom) < 0)))
15286 return 0;
15287 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15288 top, bottom));
15290 default:
15291 return 0;
15295 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15297 static bool
15298 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15300 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15301 && truth_value_p (code))
15302 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15303 have a signed:1 type (where the values are -1 and 0). */
15304 return true;
15305 return false;
15308 /* Return true if (CODE OP0) is known to be non-negative. If the return
15309 value is based on the assumption that signed overflow is undefined,
15310 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15311 *STRICT_OVERFLOW_P. */
15313 bool
15314 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15315 bool *strict_overflow_p)
15317 if (TYPE_UNSIGNED (type))
15318 return true;
15320 switch (code)
15322 case ABS_EXPR:
15323 /* We can't return true if flag_wrapv is set, because
15324 ABS_EXPR<INT_MIN> == INT_MIN. */
15325 if (!INTEGRAL_TYPE_P (type))
15326 return true;
15327 if (TYPE_OVERFLOW_UNDEFINED (type))
15329 *strict_overflow_p = true;
15330 return true;
15332 break;
15334 case NON_LVALUE_EXPR:
15335 case FLOAT_EXPR:
15336 case FIX_TRUNC_EXPR:
15337 return tree_expr_nonnegative_warnv_p (op0,
15338 strict_overflow_p);
15340 case NOP_EXPR:
15342 tree inner_type = TREE_TYPE (op0);
15343 tree outer_type = type;
15345 if (TREE_CODE (outer_type) == REAL_TYPE)
15347 if (TREE_CODE (inner_type) == REAL_TYPE)
15348 return tree_expr_nonnegative_warnv_p (op0,
15349 strict_overflow_p);
15350 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15352 if (TYPE_UNSIGNED (inner_type))
15353 return true;
15354 return tree_expr_nonnegative_warnv_p (op0,
15355 strict_overflow_p);
15358 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15360 if (TREE_CODE (inner_type) == REAL_TYPE)
15361 return tree_expr_nonnegative_warnv_p (op0,
15362 strict_overflow_p);
15363 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15364 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15365 && TYPE_UNSIGNED (inner_type);
15368 break;
15370 default:
15371 return tree_simple_nonnegative_warnv_p (code, type);
15374 /* We don't know the sign of `t', so be conservative and return false. */
15375 return false;
15378 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15379 value is based on the assumption that signed overflow is undefined,
15380 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15381 *STRICT_OVERFLOW_P. */
15383 bool
15384 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15385 tree op1, bool *strict_overflow_p)
15387 if (TYPE_UNSIGNED (type))
15388 return true;
15390 switch (code)
15392 case POINTER_PLUS_EXPR:
15393 case PLUS_EXPR:
15394 if (FLOAT_TYPE_P (type))
15395 return (tree_expr_nonnegative_warnv_p (op0,
15396 strict_overflow_p)
15397 && tree_expr_nonnegative_warnv_p (op1,
15398 strict_overflow_p));
15400 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15401 both unsigned and at least 2 bits shorter than the result. */
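/* E.g. (int) (unsigned short) x + (int) (unsigned short) y needs at
   most 17 bits, so the 32-bit sum cannot reach the sign bit.  */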
15402 if (TREE_CODE (type) == INTEGER_TYPE
15403 && TREE_CODE (op0) == NOP_EXPR
15404 && TREE_CODE (op1) == NOP_EXPR)
15406 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15407 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15408 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15409 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15411 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15412 TYPE_PRECISION (inner2)) + 1;
15413 return prec < TYPE_PRECISION (type);
15416 break;
15418 case MULT_EXPR:
15419 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15421 /* x * x is always non-negative for floating point x,
15422 or when signed overflow is undefined. */
15423 if (operand_equal_p (op0, op1, 0)
15424 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15425 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15427 if (TYPE_OVERFLOW_UNDEFINED (type))
15428 *strict_overflow_p = true;
15429 return true;
15433 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15434 both unsigned and their combined precision is less than the result's. */
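/* E.g. (int) (unsigned char) x * (int) (unsigned char) y needs at
   most 8 + 8 = 16 bits, so the 32-bit product is nonnegative.  */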
15435 if (TREE_CODE (type) == INTEGER_TYPE
15436 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15437 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15439 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15440 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15441 : TREE_TYPE (op0);
15442 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15443 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15444 : TREE_TYPE (op1);
15446 bool unsigned0 = TYPE_UNSIGNED (inner0);
15447 bool unsigned1 = TYPE_UNSIGNED (inner1);
15449 if (TREE_CODE (op0) == INTEGER_CST)
15450 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15452 if (TREE_CODE (op1) == INTEGER_CST)
15453 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15455 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15456 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15458 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15459 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15460 : TYPE_PRECISION (inner0);
15462 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15463 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15464 : TYPE_PRECISION (inner1);
15466 return precision0 + precision1 < TYPE_PRECISION (type);
15469 return false;
15471 case BIT_AND_EXPR:
15472 case MAX_EXPR:
15473 return (tree_expr_nonnegative_warnv_p (op0,
15474 strict_overflow_p)
15475 || tree_expr_nonnegative_warnv_p (op1,
15476 strict_overflow_p));
15478 case BIT_IOR_EXPR:
15479 case BIT_XOR_EXPR:
15480 case MIN_EXPR:
15481 case RDIV_EXPR:
15482 case TRUNC_DIV_EXPR:
15483 case CEIL_DIV_EXPR:
15484 case FLOOR_DIV_EXPR:
15485 case ROUND_DIV_EXPR:
15486 return (tree_expr_nonnegative_warnv_p (op0,
15487 strict_overflow_p)
15488 && tree_expr_nonnegative_warnv_p (op1,
15489 strict_overflow_p));
15491 case TRUNC_MOD_EXPR:
15492 case CEIL_MOD_EXPR:
15493 case FLOOR_MOD_EXPR:
15494 case ROUND_MOD_EXPR:
15495 return tree_expr_nonnegative_warnv_p (op0,
15496 strict_overflow_p);
15497 default:
15498 return tree_simple_nonnegative_warnv_p (code, type);
15501 /* We don't know the sign of `t', so be conservative and return false. */
15502 return false;
15505 /* Return true if T is known to be non-negative. If the return
15506 value is based on the assumption that signed overflow is undefined,
15507 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15508 *STRICT_OVERFLOW_P. */
15510 bool
15511 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15513 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15514 return true;
15516 switch (TREE_CODE (t))
15518 case INTEGER_CST:
15519 return tree_int_cst_sgn (t) >= 0;
15521 case REAL_CST:
15522 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15524 case FIXED_CST:
15525 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15527 case COND_EXPR:
15528 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15529 strict_overflow_p)
15530 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15531 strict_overflow_p));
15532 default:
15533 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15534 TREE_TYPE (t));
15536 /* We don't know the sign of `t', so be conservative and return false. */
15537 return false;
15540 /* Return true if T is known to be non-negative. If the return
15541 value is based on the assumption that signed overflow is undefined,
15542 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15543 *STRICT_OVERFLOW_P. */
15545 bool
15546 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15547 tree arg0, tree arg1, bool *strict_overflow_p)
15549 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15550 switch (DECL_FUNCTION_CODE (fndecl))
15552 CASE_FLT_FN (BUILT_IN_ACOS):
15553 CASE_FLT_FN (BUILT_IN_ACOSH):
15554 CASE_FLT_FN (BUILT_IN_CABS):
15555 CASE_FLT_FN (BUILT_IN_COSH):
15556 CASE_FLT_FN (BUILT_IN_ERFC):
15557 CASE_FLT_FN (BUILT_IN_EXP):
15558 CASE_FLT_FN (BUILT_IN_EXP10):
15559 CASE_FLT_FN (BUILT_IN_EXP2):
15560 CASE_FLT_FN (BUILT_IN_FABS):
15561 CASE_FLT_FN (BUILT_IN_FDIM):
15562 CASE_FLT_FN (BUILT_IN_HYPOT):
15563 CASE_FLT_FN (BUILT_IN_POW10):
15564 CASE_INT_FN (BUILT_IN_FFS):
15565 CASE_INT_FN (BUILT_IN_PARITY):
15566 CASE_INT_FN (BUILT_IN_POPCOUNT):
15567 case BUILT_IN_BSWAP32:
15568 case BUILT_IN_BSWAP64:
15569 /* Always true. */
15570 return true;
15572 CASE_FLT_FN (BUILT_IN_SQRT):
15573 /* sqrt(-0.0) is -0.0. */
15574 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15575 return true;
15576 return tree_expr_nonnegative_warnv_p (arg0,
15577 strict_overflow_p);
15579 CASE_FLT_FN (BUILT_IN_ASINH):
15580 CASE_FLT_FN (BUILT_IN_ATAN):
15581 CASE_FLT_FN (BUILT_IN_ATANH):
15582 CASE_FLT_FN (BUILT_IN_CBRT):
15583 CASE_FLT_FN (BUILT_IN_CEIL):
15584 CASE_FLT_FN (BUILT_IN_ERF):
15585 CASE_FLT_FN (BUILT_IN_EXPM1):
15586 CASE_FLT_FN (BUILT_IN_FLOOR):
15587 CASE_FLT_FN (BUILT_IN_FMOD):
15588 CASE_FLT_FN (BUILT_IN_FREXP):
15589 CASE_FLT_FN (BUILT_IN_ICEIL):
15590 CASE_FLT_FN (BUILT_IN_IFLOOR):
15591 CASE_FLT_FN (BUILT_IN_IRINT):
15592 CASE_FLT_FN (BUILT_IN_IROUND):
15593 CASE_FLT_FN (BUILT_IN_LCEIL):
15594 CASE_FLT_FN (BUILT_IN_LDEXP):
15595 CASE_FLT_FN (BUILT_IN_LFLOOR):
15596 CASE_FLT_FN (BUILT_IN_LLCEIL):
15597 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15598 CASE_FLT_FN (BUILT_IN_LLRINT):
15599 CASE_FLT_FN (BUILT_IN_LLROUND):
15600 CASE_FLT_FN (BUILT_IN_LRINT):
15601 CASE_FLT_FN (BUILT_IN_LROUND):
15602 CASE_FLT_FN (BUILT_IN_MODF):
15603 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15604 CASE_FLT_FN (BUILT_IN_RINT):
15605 CASE_FLT_FN (BUILT_IN_ROUND):
15606 CASE_FLT_FN (BUILT_IN_SCALB):
15607 CASE_FLT_FN (BUILT_IN_SCALBLN):
15608 CASE_FLT_FN (BUILT_IN_SCALBN):
15609 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15610 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15611 CASE_FLT_FN (BUILT_IN_SINH):
15612 CASE_FLT_FN (BUILT_IN_TANH):
15613 CASE_FLT_FN (BUILT_IN_TRUNC):
15614 /* True if the 1st argument is nonnegative. */
15615 return tree_expr_nonnegative_warnv_p (arg0,
15616 strict_overflow_p);
15618 CASE_FLT_FN (BUILT_IN_FMAX):
15619 /* True if the 1st OR 2nd arguments are nonnegative. */
15620 return (tree_expr_nonnegative_warnv_p (arg0,
15621 strict_overflow_p)
15622 || (tree_expr_nonnegative_warnv_p (arg1,
15623 strict_overflow_p)));
15625 CASE_FLT_FN (BUILT_IN_FMIN):
15626 /* True if the 1st AND 2nd arguments are nonnegative. */
15627 return (tree_expr_nonnegative_warnv_p (arg0,
15628 strict_overflow_p)
15629 && (tree_expr_nonnegative_warnv_p (arg1,
15630 strict_overflow_p)));
15632 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15633 /* True if the 2nd argument is nonnegative. */
15634 return tree_expr_nonnegative_warnv_p (arg1,
15635 strict_overflow_p);
15637 CASE_FLT_FN (BUILT_IN_POWI):
15638 /* True if the 1st argument is nonnegative or the second
15639 argument is an even integer. */
15640 if (TREE_CODE (arg1) == INTEGER_CST
15641 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15642 return true;
15643 return tree_expr_nonnegative_warnv_p (arg0,
15644 strict_overflow_p);
15646 CASE_FLT_FN (BUILT_IN_POW):
15647 /* True if the 1st argument is nonnegative or the second
15648 argument is an even integer-valued real. */
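/* E.g. pow (x, 4.0) is nonnegative for any x because the exponent is
   an even integer, whereas pow (x, 3.0) is nonnegative only when x
   is.  */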
15649 if (TREE_CODE (arg1) == REAL_CST)
15651 REAL_VALUE_TYPE c;
15652 HOST_WIDE_INT n;
15654 c = TREE_REAL_CST (arg1);
15655 n = real_to_integer (&c);
15656 if ((n & 1) == 0)
15658 REAL_VALUE_TYPE cint;
15659 real_from_integer (&cint, VOIDmode, n,
15660 n < 0 ? -1 : 0, 0);
15661 if (real_identical (&c, &cint))
15662 return true;
15665 return tree_expr_nonnegative_warnv_p (arg0,
15666 strict_overflow_p);
15668 default:
15669 break;
15671 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15672 type);
15675 /* Return true if T is known to be non-negative. If the return
15676 value is based on the assumption that signed overflow is undefined,
15677 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15678 *STRICT_OVERFLOW_P. */
15680 bool
15681 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15683 enum tree_code code = TREE_CODE (t);
15684 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15685 return true;
15687 switch (code)
15689 case TARGET_EXPR:
15691 tree temp = TARGET_EXPR_SLOT (t);
15692 t = TARGET_EXPR_INITIAL (t);
15694 /* If the initializer is non-void, then it's a normal expression
15695 that will be assigned to the slot. */
15696 if (!VOID_TYPE_P (t))
15697 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15699 /* Otherwise, the initializer sets the slot in some way. One common
15700 way is an assignment statement at the end of the initializer. */
15701 while (1)
15703 if (TREE_CODE (t) == BIND_EXPR)
15704 t = expr_last (BIND_EXPR_BODY (t));
15705 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15706 || TREE_CODE (t) == TRY_CATCH_EXPR)
15707 t = expr_last (TREE_OPERAND (t, 0));
15708 else if (TREE_CODE (t) == STATEMENT_LIST)
15709 t = expr_last (t);
15710 else
15711 break;
15713 if (TREE_CODE (t) == MODIFY_EXPR
15714 && TREE_OPERAND (t, 0) == temp)
15715 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15716 strict_overflow_p);
15718 return false;
15721 case CALL_EXPR:
15723 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15724 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15726 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15727 get_callee_fndecl (t),
15728 arg0,
15729 arg1,
15730 strict_overflow_p);
15732 case COMPOUND_EXPR:
15733 case MODIFY_EXPR:
15734 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15735 strict_overflow_p);
15736 case BIND_EXPR:
15737 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15738 strict_overflow_p);
15739 case SAVE_EXPR:
15740 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15741 strict_overflow_p);
15743 default:
15744 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15745 TREE_TYPE (t));
15748 /* We don't know the sign of `t', so be conservative and return false. */
15749 return false;
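/* Editorial aside, not part of the original sources: the TARGET_EXPR
   case above handles GENERIC of the shape

     TARGET_EXPR <D.1234, { ...; D.1234 = expr; }>

   (e.g. a C++ temporary), digging the final "slot = expr" assignment
   out of the initializer and asking whether "expr" is nonnegative.
   D.1234 is a made-up slot name for illustration.  */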
15752 /* Return true if T is known to be non-negative. If the return
15753 value is based on the assumption that signed overflow is undefined,
15754 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15755 *STRICT_OVERFLOW_P. */
15757 bool
15758 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15760 enum tree_code code;
15761 if (t == error_mark_node)
15762 return false;
15764 code = TREE_CODE (t);
15765 switch (TREE_CODE_CLASS (code))
15767 case tcc_binary:
15768 case tcc_comparison:
15769 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15770 TREE_TYPE (t),
15771 TREE_OPERAND (t, 0),
15772 TREE_OPERAND (t, 1),
15773 strict_overflow_p);
15775 case tcc_unary:
15776 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15777 TREE_TYPE (t),
15778 TREE_OPERAND (t, 0),
15779 strict_overflow_p);
15781 case tcc_constant:
15782 case tcc_declaration:
15783 case tcc_reference:
15784 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15786 default:
15787 break;
15790 switch (code)
15792 case TRUTH_AND_EXPR:
15793 case TRUTH_OR_EXPR:
15794 case TRUTH_XOR_EXPR:
15795 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15796 TREE_TYPE (t),
15797 TREE_OPERAND (t, 0),
15798 TREE_OPERAND (t, 1),
15799 strict_overflow_p);
15800 case TRUTH_NOT_EXPR:
15801 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15802 TREE_TYPE (t),
15803 TREE_OPERAND (t, 0),
15804 strict_overflow_p);
15806 case COND_EXPR:
15807 case CONSTRUCTOR:
15808 case OBJ_TYPE_REF:
15809 case ASSERT_EXPR:
15810 case ADDR_EXPR:
15811 case WITH_SIZE_EXPR:
15812 case SSA_NAME:
15813 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15815 default:
15816 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15820 /* Return true if `t' is known to be non-negative. Handle warnings
15821 about undefined signed overflow. */
15823 bool
15824 tree_expr_nonnegative_p (tree t)
15826 bool ret, strict_overflow_p;
15828 strict_overflow_p = false;
15829 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15830 if (strict_overflow_p)
15831 fold_overflow_warning (("assuming signed overflow does not occur when "
15832 "determining that expression is always "
15833 "non-negative"),
15834 WARN_STRICT_OVERFLOW_MISC);
15835 return ret;
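/* Editorial aside, not part of the original sources: the *_warnv_p /
   *_p pairing above is the idiom used throughout this file -- compute
   a fact, record whether it relied on signed overflow being undefined,
   and let the thin wrapper decide whether to warn.  In miniature, with
   hypothetical names:  */

#if 0
static bool fact_warnv_p (tree t, bool *strict_overflow_p);

static bool
fact_p (tree t)
{
  bool strict_overflow_p = false;
  bool ret = fact_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning ("assuming signed overflow does not occur...",
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
#endif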
15839 /* Return true when the expression (CODE OP0) is known to be nonzero.
15840 For floating point we further ensure that the value is not denormal.
15841 Similar logic is present in nonzero_address in rtlanal.c.
15843 If the return value is based on the assumption that signed overflow
15844 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15845 change *STRICT_OVERFLOW_P. */
15847 bool
15848 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15849 bool *strict_overflow_p)
15851 switch (code)
15853 case ABS_EXPR:
15854 return tree_expr_nonzero_warnv_p (op0,
15855 strict_overflow_p);
15857 case NOP_EXPR:
15859 tree inner_type = TREE_TYPE (op0);
15860 tree outer_type = type;
15862 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15863 && tree_expr_nonzero_warnv_p (op0,
15864 strict_overflow_p));
15866 break;
15868 case NON_LVALUE_EXPR:
15869 return tree_expr_nonzero_warnv_p (op0,
15870 strict_overflow_p);
15872 default:
15873 break;
15876 return false;
15879 /* Return true when the expression (CODE OP0 OP1) is known to be
15880 nonzero. For floating point we further ensure that the value is not
15881 denormal. Similar logic is present in nonzero_address in rtlanal.c.
15883 If the return value is based on the assumption that signed overflow
15884 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15885 change *STRICT_OVERFLOW_P. */
15887 bool
15888 tree_binary_nonzero_warnv_p (enum tree_code code,
15889 tree type,
15890 tree op0,
15891 tree op1, bool *strict_overflow_p)
15893 bool sub_strict_overflow_p;
15894 switch (code)
15896 case POINTER_PLUS_EXPR:
15897 case PLUS_EXPR:
15898 if (TYPE_OVERFLOW_UNDEFINED (type))
15900 /* In the presence of negative values it is hard to say
15901 anything definite. */
15902 sub_strict_overflow_p = false;
15903 if (!tree_expr_nonnegative_warnv_p (op0,
15904 &sub_strict_overflow_p)
15905 || !tree_expr_nonnegative_warnv_p (op1,
15906 &sub_strict_overflow_p))
15907 return false;
15908 /* One of the operands must be positive and the other non-negative. */
15909 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15910 overflows, on a twos-complement machine the sum of two
15911 nonnegative numbers can never be zero. */
15912 return (tree_expr_nonzero_warnv_p (op0,
15913 strict_overflow_p)
15914 || tree_expr_nonzero_warnv_p (op1,
15915 strict_overflow_p));
15917 break;
15919 case MULT_EXPR:
15920 if (TYPE_OVERFLOW_UNDEFINED (type))
15922 if (tree_expr_nonzero_warnv_p (op0,
15923 strict_overflow_p)
15924 && tree_expr_nonzero_warnv_p (op1,
15925 strict_overflow_p))
15927 *strict_overflow_p = true;
15928 return true;
15931 break;
15933 case MIN_EXPR:
15934 sub_strict_overflow_p = false;
15935 if (tree_expr_nonzero_warnv_p (op0,
15936 &sub_strict_overflow_p)
15937 && tree_expr_nonzero_warnv_p (op1,
15938 &sub_strict_overflow_p))
15940 if (sub_strict_overflow_p)
15941 *strict_overflow_p = true;
15943 break;
15945 case MAX_EXPR:
15946 sub_strict_overflow_p = false;
15947 if (tree_expr_nonzero_warnv_p (op0,
15948 &sub_strict_overflow_p))
15950 if (sub_strict_overflow_p)
15951 *strict_overflow_p = true;
15953 /* When both operands are nonzero, then MAX must be too. */
15954 if (tree_expr_nonzero_warnv_p (op1,
15955 strict_overflow_p))
15956 return true;
15958 /* MAX where operand 0 is positive is positive. */
15959 return tree_expr_nonnegative_warnv_p (op0,
15960 strict_overflow_p);
15962 /* MAX where operand 1 is positive is positive. */
15963 else if (tree_expr_nonzero_warnv_p (op1,
15964 &sub_strict_overflow_p)
15965 && tree_expr_nonnegative_warnv_p (op1,
15966 &sub_strict_overflow_p))
15968 if (sub_strict_overflow_p)
15969 *strict_overflow_p = true;
15970 return true;
15972 break;
15974 case BIT_IOR_EXPR:
15975 return (tree_expr_nonzero_warnv_p (op1,
15976 strict_overflow_p)
15977 || tree_expr_nonzero_warnv_p (op0,
15978 strict_overflow_p));
15980 default:
15981 break;
15984 return false;
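/* Editorial aside, not part of the original sources: the PLUS_EXPR
   comment above holds by plain modular arithmetic.  Two nonnegative
   32-bit values lie in [0, 2^31 - 1], so their exact sum is at most
   2^32 - 2 and can only wrap to zero modulo 2^32 when both are zero.
   A standalone check of the claim:  */

#if 0
#include <stdint.h>
#include <assert.h>

static void
nonneg_sum_nonzero_check (int32_t a, int32_t b)
{
  if (a >= 0 && b >= 0 && (a != 0 || b != 0))
    /* The casts model a twos-complement wraparound sum.  */
    assert ((uint32_t) a + (uint32_t) b != 0);
}
#endif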
15987 /* Return true when T is known to be nonzero. For floating point we
15988 further ensure that T is not denormal.
15989 Similar logic is present in nonzero_address in rtlanal.c.
15991 If the return value is based on the assumption that signed overflow
15992 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15993 change *STRICT_OVERFLOW_P. */
15995 bool
15996 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15998 bool sub_strict_overflow_p;
15999 switch (TREE_CODE (t))
16001 case INTEGER_CST:
16002 return !integer_zerop (t);
16004 case ADDR_EXPR:
16006 tree base = TREE_OPERAND (t, 0);
16007 if (!DECL_P (base))
16008 base = get_base_address (base);
16010 if (!base)
16011 return false;
16013 /* Weak declarations may link to NULL. Other things may also be NULL,
16014 so protect with -fdelete-null-pointer-checks; variables allocated
16015 on the stack, however, are never NULL. */
16016 if (DECL_P (base)
16017 && (flag_delete_null_pointer_checks
16018 || (DECL_CONTEXT (base)
16019 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16020 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16021 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16023 /* Constants are never weak. */
16024 if (CONSTANT_CLASS_P (base))
16025 return true;
16027 return false;
16030 case COND_EXPR:
16031 sub_strict_overflow_p = false;
16032 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16033 &sub_strict_overflow_p)
16034 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16035 &sub_strict_overflow_p))
16037 if (sub_strict_overflow_p)
16038 *strict_overflow_p = true;
16039 return true;
16041 break;
16043 default:
16044 break;
16046 return false;
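/* Editorial aside, not part of the original sources: the ADDR_EXPR
   case above at C source level:  */

#if 0
extern int w __attribute__ ((weak)); /* may resolve to address 0 */

static int f (void) { return &w != 0; }        /* not folded */
static int g (void) { int x; return &x != 0; } /* folds to 1: stack
                                                  objects are never
                                                  at address 0 */
#endif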
16049 /* Return true when T is known to be nonzero. For floating point we
16050 further ensure that T is not denormal.
16051 Similar logic is present in nonzero_address in rtlanal.c.
16053 If the return value is based on the assumption that signed overflow
16054 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16055 change *STRICT_OVERFLOW_P. */
16057 bool
16058 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16060 tree type = TREE_TYPE (t);
16061 enum tree_code code;
16063 /* Doing something useful for floating point would need more work. */
16064 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16065 return false;
16067 code = TREE_CODE (t);
16068 switch (TREE_CODE_CLASS (code))
16070 case tcc_unary:
16071 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16072 strict_overflow_p);
16073 case tcc_binary:
16074 case tcc_comparison:
16075 return tree_binary_nonzero_warnv_p (code, type,
16076 TREE_OPERAND (t, 0),
16077 TREE_OPERAND (t, 1),
16078 strict_overflow_p);
16079 case tcc_constant:
16080 case tcc_declaration:
16081 case tcc_reference:
16082 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16084 default:
16085 break;
16088 switch (code)
16090 case TRUTH_NOT_EXPR:
16091 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16092 strict_overflow_p);
16094 case TRUTH_AND_EXPR:
16095 case TRUTH_OR_EXPR:
16096 case TRUTH_XOR_EXPR:
16097 return tree_binary_nonzero_warnv_p (code, type,
16098 TREE_OPERAND (t, 0),
16099 TREE_OPERAND (t, 1),
16100 strict_overflow_p);
16102 case COND_EXPR:
16103 case CONSTRUCTOR:
16104 case OBJ_TYPE_REF:
16105 case ASSERT_EXPR:
16106 case ADDR_EXPR:
16107 case WITH_SIZE_EXPR:
16108 case SSA_NAME:
16109 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16111 case COMPOUND_EXPR:
16112 case MODIFY_EXPR:
16113 case BIND_EXPR:
16114 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16115 strict_overflow_p);
16117 case SAVE_EXPR:
16118 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16119 strict_overflow_p);
16121 case CALL_EXPR:
16122 return alloca_call_p (t);
16124 default:
16125 break;
16127 return false;
16130 /* Return true when T is known to be nonzero. Handle warnings about
16131 undefined signed overflow. */
16133 bool
16134 tree_expr_nonzero_p (tree t)
16136 bool ret, strict_overflow_p;
16138 strict_overflow_p = false;
16139 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16140 if (strict_overflow_p)
16141 fold_overflow_warning (("assuming signed overflow does not occur when "
16142 "determining that expression is always "
16143 "non-zero"),
16144 WARN_STRICT_OVERFLOW_MISC);
16145 return ret;
16148 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16149 attempt to fold the expression to a constant without modifying TYPE,
16150 OP0 or OP1.
16152 If the expression could be simplified to a constant, then return
16153 the constant. If the expression cannot be simplified to a
16154 constant, return NULL_TREE. */
16156 tree
16157 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16159 tree tem = fold_binary (code, type, op0, op1);
16160 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16163 /* Given the components of a unary expression CODE, TYPE and OP0,
16164 attempt to fold the expression to a constant without modifying
16165 TYPE or OP0.
16167 If the expression could be simplified to a constant, then return
16168 the constant. If the expression cannot be simplified to a
16169 constant, return NULL_TREE. */
16171 tree
16172 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16174 tree tem = fold_unary (code, type, op0);
16175 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
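/* Editorial usage sketch, not part of the original sources -- a
   hypothetical caller folding 2 + 3 straight to the INTEGER_CST 5
   without first allocating a PLUS_EXPR node:  */

#if 0
static tree
fold_two_plus_three (void)
{
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);
  /* Yields NULL_TREE if the operation does not simplify.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
}
#endif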
16178 /* If EXP represents referencing an element in a constant string
16179 (either via pointer arithmetic or array indexing), return the
16180 tree representing the value accessed, otherwise return NULL. */
16182 tree
16183 fold_read_from_constant_string (tree exp)
16185 if ((TREE_CODE (exp) == INDIRECT_REF
16186 || TREE_CODE (exp) == ARRAY_REF)
16187 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16189 tree exp1 = TREE_OPERAND (exp, 0);
16190 tree index;
16191 tree string;
16192 location_t loc = EXPR_LOCATION (exp);
16194 if (TREE_CODE (exp) == INDIRECT_REF)
16195 string = string_constant (exp1, &index);
16196 else
16198 tree low_bound = array_ref_low_bound (exp);
16199 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16201 /* Optimize the special case of a zero lower bound.
16203 We convert the low_bound to sizetype to avoid some problems
16204 with constant folding. (E.g. suppose the lower bound is 1,
16205 and its mode is QI. Without the conversion, (ARRAY
16206 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16207 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16208 if (! integer_zerop (low_bound))
16209 index = size_diffop_loc (loc, index,
16210 fold_convert_loc (loc, sizetype, low_bound));
16212 string = exp1;
16215 if (string
16216 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16217 && TREE_CODE (string) == STRING_CST
16218 && TREE_CODE (index) == INTEGER_CST
16219 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16220 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16221 == MODE_INT)
16222 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16223 return build_int_cst_type (TREE_TYPE (exp),
16224 (TREE_STRING_POINTER (string)
16225 [TREE_INT_CST_LOW (index)]));
16227 return NULL;
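/* Editorial aside, not part of the original sources: at C source level
   fold_read_from_constant_string lets both access forms fold to
   character constants at compile time:  */

#if 0
static int array_form (void)   { return "abc"[1]; }     /* folds to 98 ('b') */
static int pointer_form (void) { return *("abc" + 2); } /* folds to 99 ('c') */
#endif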
16230 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16231 an integer, real, or fixed-point constant.
16233 TYPE is the type of the result. */
16235 static tree
16236 fold_negate_const (tree arg0, tree type)
16238 tree t = NULL_TREE;
16240 switch (TREE_CODE (arg0))
16242 case INTEGER_CST:
16244 double_int val = tree_to_double_int (arg0);
16245 bool overflow;
16246 val = val.neg_with_overflow (&overflow);
16247 t = force_fit_type_double (type, val, 1,
16248 (overflow | TREE_OVERFLOW (arg0))
16249 && !TYPE_UNSIGNED (type));
16250 break;
16253 case REAL_CST:
16254 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16255 break;
16257 case FIXED_CST:
16259 FIXED_VALUE_TYPE f;
16260 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16261 &(TREE_FIXED_CST (arg0)), NULL,
16262 TYPE_SATURATING (type));
16263 t = build_fixed (type, f);
16264 /* Propagate overflow flags. */
16265 if (overflow_p | TREE_OVERFLOW (arg0))
16266 TREE_OVERFLOW (t) = 1;
16267 break;
16270 default:
16271 gcc_unreachable ();
16274 return t;
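/* Editorial aside, not part of the original sources: the INTEGER_CST
   branch above tracks overflow because twos-complement negation can
   overflow -- for 32-bit int, -INT_MIN wraps back to INT_MIN, so the
   folded constant carries TREE_OVERFLOW.  The host-side arithmetic:  */

#if 0
#include <stdint.h>
#include <assert.h>

static void
negate_int_min_check (void)
{
  uint32_t int_min = UINT32_C (0x80000000); /* bit pattern of INT32_MIN */
  /* Negation modulo 2^32 maps the pattern to itself.  */
  assert (0U - int_min == int_min);
}
#endif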
16277 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16278 an integer constant or real constant.
16280 TYPE is the type of the result. */
16282 tree
16283 fold_abs_const (tree arg0, tree type)
16285 tree t = NULL_TREE;
16287 switch (TREE_CODE (arg0))
16289 case INTEGER_CST:
16291 double_int val = tree_to_double_int (arg0);
16293 /* If the value is unsigned or non-negative, then the absolute value
16294 is the same as the ordinary value. */
16295 if (TYPE_UNSIGNED (type)
16296 || !val.is_negative ())
16297 t = arg0;
16299 /* If the value is negative, then the absolute value is
16300 its negation. */
16301 else
16303 bool overflow;
16304 val = val.neg_with_overflow (&overflow);
16305 t = force_fit_type_double (type, val, -1,
16306 overflow | TREE_OVERFLOW (arg0));
16309 break;
16311 case REAL_CST:
16312 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16313 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16314 else
16315 t = arg0;
16316 break;
16318 default:
16319 gcc_unreachable ();
16322 return t;
16325 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16326 constant. TYPE is the type of the result. */
16328 static tree
16329 fold_not_const (const_tree arg0, tree type)
16331 double_int val;
16333 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16335 val = ~tree_to_double_int (arg0);
16336 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16339 /* Given CODE, a relational operator, the target type, TYPE and two
16340 constant operands OP0 and OP1, return the result of the
16341 relational operation. If the result is not a compile time
16342 constant, then return NULL_TREE. */
16344 static tree
16345 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16347 int result, invert;
16349 /* From here on, the only cases we handle are when the result is
16350 known to be a constant. */
16352 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16354 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16355 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16357 /* Handle the cases where either operand is a NaN. */
16358 if (real_isnan (c0) || real_isnan (c1))
16360 switch (code)
16362 case EQ_EXPR:
16363 case ORDERED_EXPR:
16364 result = 0;
16365 break;
16367 case NE_EXPR:
16368 case UNORDERED_EXPR:
16369 case UNLT_EXPR:
16370 case UNLE_EXPR:
16371 case UNGT_EXPR:
16372 case UNGE_EXPR:
16373 case UNEQ_EXPR:
16374 result = 1;
16375 break;
16377 case LT_EXPR:
16378 case LE_EXPR:
16379 case GT_EXPR:
16380 case GE_EXPR:
16381 case LTGT_EXPR:
16382 if (flag_trapping_math)
16383 return NULL_TREE;
16384 result = 0;
16385 break;
16387 default:
16388 gcc_unreachable ();
16391 return constant_boolean_node (result, type);
16394 return constant_boolean_node (real_compare (code, c0, c1), type);
16397 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16399 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16400 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16401 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16404 /* Handle equality/inequality of complex constants. */
16405 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16407 tree rcond = fold_relational_const (code, type,
16408 TREE_REALPART (op0),
16409 TREE_REALPART (op1));
16410 tree icond = fold_relational_const (code, type,
16411 TREE_IMAGPART (op0),
16412 TREE_IMAGPART (op1));
16413 if (code == EQ_EXPR)
16414 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16415 else if (code == NE_EXPR)
16416 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16417 else
16418 return NULL_TREE;
16421 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16423 unsigned count = VECTOR_CST_NELTS (op0);
16424 tree *elts = XALLOCAVEC (tree, count);
16425 gcc_assert (VECTOR_CST_NELTS (op1) == count
16426 && TYPE_VECTOR_SUBPARTS (type) == count);
16428 for (unsigned i = 0; i < count; i++)
16430 tree elem_type = TREE_TYPE (type);
16431 tree elem0 = VECTOR_CST_ELT (op0, i);
16432 tree elem1 = VECTOR_CST_ELT (op1, i);
16434 tree tem = fold_relational_const (code, elem_type,
16435 elem0, elem1);
16437 if (tem == NULL_TREE)
16438 return NULL_TREE;
16440 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16443 return build_vector (type, elts);
16446 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16448 To compute GT, swap the arguments and do LT.
16449 To compute GE, do LT and invert the result.
16450 To compute LE, swap the arguments, do LT and invert the result.
16451 To compute NE, do EQ and invert the result.
16453 Therefore, the code below must handle only EQ and LT. */
16455 if (code == LE_EXPR || code == GT_EXPR)
16457 tree tem = op0;
16458 op0 = op1;
16459 op1 = tem;
16460 code = swap_tree_comparison (code);
16463 /* Note that it is safe to invert for real values here because we
16464 have already handled the one case where it matters. */
16466 invert = 0;
16467 if (code == NE_EXPR || code == GE_EXPR)
16469 invert = 1;
16470 code = invert_tree_comparison (code, false);
16473 /* Compute a result for LT or EQ if args permit;
16474 otherwise return NULL_TREE. */
16475 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16477 if (code == EQ_EXPR)
16478 result = tree_int_cst_equal (op0, op1);
16479 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16480 result = INT_CST_LT_UNSIGNED (op0, op1);
16481 else
16482 result = INT_CST_LT (op0, op1);
16484 else
16485 return NULL_TREE;
16487 if (invert)
16488 result ^= 1;
16489 return constant_boolean_node (result, type);
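/* Editorial aside, not part of the original sources: the NaN handling
   above, observable at C source level (and checkable at run time):  */

#if 0
#include <math.h>
#include <assert.h>

static void
nan_compare_check (void)
{
  double x = nan ("");
  assert (!(x == x)); /* EQ_EXPR folds to 0 */
  assert (x != x);    /* NE_EXPR folds to 1 */
  assert (!(x < x));  /* LT_EXPR folds to 0, but only when
                         -ftrapping-math is off, since the ordered
                         compare may raise INVALID */
}
#endif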
16492 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16493 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16494 itself. */
16496 tree
16497 fold_build_cleanup_point_expr (tree type, tree expr)
16499 /* If the expression does not have side effects then we don't have to wrap
16500 it with a cleanup point expression. */
16501 if (!TREE_SIDE_EFFECTS (expr))
16502 return expr;
16504 /* If the expression is a return, check whether the expression inside
16505 the return, or the right-hand side of the modify expression inside
16506 it, is free of side effects. If either is, we don't need to wrap
16507 the expression in a cleanup point expression. Note we don't check
16508 the left-hand side of the modify because it should always be a return decl. */
16509 if (TREE_CODE (expr) == RETURN_EXPR)
16511 tree op = TREE_OPERAND (expr, 0);
16512 if (!op || !TREE_SIDE_EFFECTS (op))
16513 return expr;
16514 op = TREE_OPERAND (op, 1);
16515 if (!TREE_SIDE_EFFECTS (op))
16516 return expr;
16519 return build1 (CLEANUP_POINT_EXPR, type, expr);
16522 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16523 of an indirection through OP0, or NULL_TREE if no simplification is
16524 possible. */
16526 tree
16527 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16529 tree sub = op0;
16530 tree subtype;
16532 STRIP_NOPS (sub);
16533 subtype = TREE_TYPE (sub);
16534 if (!POINTER_TYPE_P (subtype))
16535 return NULL_TREE;
16537 if (TREE_CODE (sub) == ADDR_EXPR)
16539 tree op = TREE_OPERAND (sub, 0);
16540 tree optype = TREE_TYPE (op);
16541 /* *&CONST_DECL -> the value of the const decl. */
16542 if (TREE_CODE (op) == CONST_DECL)
16543 return DECL_INITIAL (op);
16544 /* *&p => p; make sure to handle *&"str"[cst] here. */
16545 if (type == optype)
16547 tree fop = fold_read_from_constant_string (op);
16548 if (fop)
16549 return fop;
16550 else
16551 return op;
16553 /* *(foo *)&fooarray => fooarray[0] */
16554 else if (TREE_CODE (optype) == ARRAY_TYPE
16555 && type == TREE_TYPE (optype)
16556 && (!in_gimple_form
16557 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16559 tree type_domain = TYPE_DOMAIN (optype);
16560 tree min_val = size_zero_node;
16561 if (type_domain && TYPE_MIN_VALUE (type_domain))
16562 min_val = TYPE_MIN_VALUE (type_domain);
16563 if (in_gimple_form
16564 && TREE_CODE (min_val) != INTEGER_CST)
16565 return NULL_TREE;
16566 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16567 NULL_TREE, NULL_TREE);
16569 /* *(foo *)&complexfoo => __real__ complexfoo */
16570 else if (TREE_CODE (optype) == COMPLEX_TYPE
16571 && type == TREE_TYPE (optype))
16572 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16573 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16574 else if (TREE_CODE (optype) == VECTOR_TYPE
16575 && type == TREE_TYPE (optype))
16577 tree part_width = TYPE_SIZE (type);
16578 tree index = bitsize_int (0);
16579 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16583 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16584 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16586 tree op00 = TREE_OPERAND (sub, 0);
16587 tree op01 = TREE_OPERAND (sub, 1);
16589 STRIP_NOPS (op00);
16590 if (TREE_CODE (op00) == ADDR_EXPR)
16592 tree op00type;
16593 op00 = TREE_OPERAND (op00, 0);
16594 op00type = TREE_TYPE (op00);
16596 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16597 if (TREE_CODE (op00type) == VECTOR_TYPE
16598 && type == TREE_TYPE (op00type))
16600 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16601 tree part_width = TYPE_SIZE (type);
16602 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16603 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16604 tree index = bitsize_int (indexi);
16606 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16607 return fold_build3_loc (loc,
16608 BIT_FIELD_REF, type, op00,
16609 part_width, index);
16612 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16613 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16614 && type == TREE_TYPE (op00type))
16616 tree size = TYPE_SIZE_UNIT (type);
16617 if (tree_int_cst_equal (size, op01))
16618 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16620 /* ((foo *)&fooarray)[1] => fooarray[1] */
16621 else if (TREE_CODE (op00type) == ARRAY_TYPE
16622 && type == TREE_TYPE (op00type))
16624 tree type_domain = TYPE_DOMAIN (op00type);
16625 tree min_val = size_zero_node;
16626 if (type_domain && TYPE_MIN_VALUE (type_domain))
16627 min_val = TYPE_MIN_VALUE (type_domain);
16628 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16629 TYPE_SIZE_UNIT (type));
16630 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16631 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16632 NULL_TREE, NULL_TREE);
16637 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16638 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16639 && type == TREE_TYPE (TREE_TYPE (subtype))
16640 && (!in_gimple_form
16641 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16643 tree type_domain;
16644 tree min_val = size_zero_node;
16645 sub = build_fold_indirect_ref_loc (loc, sub);
16646 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16647 if (type_domain && TYPE_MIN_VALUE (type_domain))
16648 min_val = TYPE_MIN_VALUE (type_domain);
16649 if (in_gimple_form
16650 && TREE_CODE (min_val) != INTEGER_CST)
16651 return NULL_TREE;
16652 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16653 NULL_TREE);
16656 return NULL_TREE;
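/* Editorial aside, not part of the original sources: the
   simplifications above, written at (GNU) C source level:  */

#if 0
int a[4];
_Complex double z;

static int    elt0 (void) { return *(int *) &a; }        /* => a[0] */
static double re (void)   { return *(double *) &z; }     /* => __real__ z */
static double im (void)   { return ((double *) &z)[1]; } /* => __imag__ z */
#endif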
16659 /* Builds an expression for an indirection through T, simplifying some
16660 cases. */
16662 tree
16663 build_fold_indirect_ref_loc (location_t loc, tree t)
16665 tree type = TREE_TYPE (TREE_TYPE (t));
16666 tree sub = fold_indirect_ref_1 (loc, type, t);
16668 if (sub)
16669 return sub;
16671 return build1_loc (loc, INDIRECT_REF, type, t);
16674 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16676 tree
16677 fold_indirect_ref_loc (location_t loc, tree t)
16679 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16681 if (sub)
16682 return sub;
16683 else
16684 return t;
16687 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16688 whose result is ignored. The type of the returned tree need not be
16689 the same as the original expression. */
16691 tree
16692 fold_ignored_result (tree t)
16694 if (!TREE_SIDE_EFFECTS (t))
16695 return integer_zero_node;
16697 for (;;)
16698 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16700 case tcc_unary:
16701 t = TREE_OPERAND (t, 0);
16702 break;
16704 case tcc_binary:
16705 case tcc_comparison:
16706 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16707 t = TREE_OPERAND (t, 0);
16708 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16709 t = TREE_OPERAND (t, 1);
16710 else
16711 return t;
16712 break;
16714 case tcc_expression:
16715 switch (TREE_CODE (t))
16717 case COMPOUND_EXPR:
16718 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16719 return t;
16720 t = TREE_OPERAND (t, 0);
16721 break;
16723 case COND_EXPR:
16724 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16725 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16726 return t;
16727 t = TREE_OPERAND (t, 0);
16728 break;
16730 default:
16731 return t;
16733 break;
16735 default:
16736 return t;
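/* Editorial aside, not part of the original sources: with the result
   ignored, e.g.

     f () + 3         reduces to  f ()
     (f (), x * y)    reduces to  f ()
     c ? f () : g ()  is kept whole, both arms having side effects

   and anything with no side effects at all reduces to 0.  */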
16740 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16741 This can only be applied to objects of a sizetype. */
16743 tree
16744 round_up_loc (location_t loc, tree value, int divisor)
16746 tree div = NULL_TREE;
16748 gcc_assert (divisor > 0);
16749 if (divisor == 1)
16750 return value;
16752 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16753 have to do anything. Only do this when we are not given a const,
16754 because for a constant this check is more expensive than simply
16755 doing the rounding. */
16756 if (TREE_CODE (value) != INTEGER_CST)
16758 div = build_int_cst (TREE_TYPE (value), divisor);
16760 if (multiple_of_p (TREE_TYPE (value), value, div))
16761 return value;
16764 /* If divisor is a power of two, simplify this to bit manipulation. */
16765 if (divisor == (divisor & -divisor))
16767 if (TREE_CODE (value) == INTEGER_CST)
16769 double_int val = tree_to_double_int (value);
16770 bool overflow_p;
16772 if ((val.low & (divisor - 1)) == 0)
16773 return value;
16775 overflow_p = TREE_OVERFLOW (value);
16776 val.low &= ~(divisor - 1);
16777 val.low += divisor;
16778 if (val.low == 0)
16780 val.high++;
16781 if (val.high == 0)
16782 overflow_p = true;
16785 return force_fit_type_double (TREE_TYPE (value), val,
16786 -1, overflow_p);
16788 else
16790 tree t;
16792 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16793 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16794 t = build_int_cst (TREE_TYPE (value), -divisor);
16795 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16798 else
16800 if (!div)
16801 div = build_int_cst (TREE_TYPE (value), divisor);
16802 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16803 value = size_binop_loc (loc, MULT_EXPR, value, div);
16806 return value;
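/* Editorial aside, not part of the original sources: for a
   power-of-two DIVISOR the non-constant branch above computes
   (value + divisor - 1) & -divisor.  The same arithmetic on plain
   unsigned values:  */

#if 0
static unsigned long
round_up_example (unsigned long value, unsigned long divisor)
{
  /* DIVISOR must be a power of two, as in the branch above.  */
  return (value + divisor - 1) & -divisor;
}
/* round_up_example (37, 8) == 40; round_up_example (40, 8) == 40.  */
#endif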
16809 /* Likewise, but round down. */
16811 tree
16812 round_down_loc (location_t loc, tree value, int divisor)
16814 tree div = NULL_TREE;
16816 gcc_assert (divisor > 0);
16817 if (divisor == 1)
16818 return value;
16820 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16821 have to do anything. Only do this when we are not given a const,
16822 because for a constant this check is more expensive than simply
16823 doing the rounding. */
16824 if (TREE_CODE (value) != INTEGER_CST)
16826 div = build_int_cst (TREE_TYPE (value), divisor);
16828 if (multiple_of_p (TREE_TYPE (value), value, div))
16829 return value;
16832 /* If divisor is a power of two, simplify this to bit manipulation. */
16833 if (divisor == (divisor & -divisor))
16835 tree t;
16837 t = build_int_cst (TREE_TYPE (value), -divisor);
16838 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16840 else
16842 if (!div)
16843 div = build_int_cst (TREE_TYPE (value), divisor);
16844 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16845 value = size_binop_loc (loc, MULT_EXPR, value, div);
16848 return value;
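/* Editorial aside, not part of the original sources: the power-of-two
   branch above is simply VALUE & -DIVISOR, e.g. 37 & -8 == 32 and
   40 & -8 == 40 on a twos-complement host.  */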
16851 /* Returns the pointer to the base of the object addressed by EXP and
16852 extracts the information about the offset of the access, storing it
16853 in *PBITPOS and *POFFSET. */
16855 static tree
16856 split_address_to_core_and_offset (tree exp,
16857 HOST_WIDE_INT *pbitpos, tree *poffset)
16859 tree core;
16860 enum machine_mode mode;
16861 int unsignedp, volatilep;
16862 HOST_WIDE_INT bitsize;
16863 location_t loc = EXPR_LOCATION (exp);
16865 if (TREE_CODE (exp) == ADDR_EXPR)
16867 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16868 poffset, &mode, &unsignedp, &volatilep,
16869 false);
16870 core = build_fold_addr_expr_loc (loc, core);
16872 else
16874 core = exp;
16875 *pbitpos = 0;
16876 *poffset = NULL_TREE;
16879 return core;
16882 /* Returns true if addresses of E1 and E2 differ by a constant, false
16883 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16885 bool
16886 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16888 tree core1, core2;
16889 HOST_WIDE_INT bitpos1, bitpos2;
16890 tree toffset1, toffset2, tdiff, type;
16892 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16893 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16895 if (bitpos1 % BITS_PER_UNIT != 0
16896 || bitpos2 % BITS_PER_UNIT != 0
16897 || !operand_equal_p (core1, core2, 0))
16898 return false;
16900 if (toffset1 && toffset2)
16902 type = TREE_TYPE (toffset1);
16903 if (type != TREE_TYPE (toffset2))
16904 toffset2 = fold_convert (type, toffset2);
16906 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16907 if (!cst_and_fits_in_hwi (tdiff))
16908 return false;
16910 *diff = int_cst_value (tdiff);
16912 else if (toffset1 || toffset2)
16914 /* If only one of the offsets is non-constant, the difference cannot
16915 be a constant. */
16916 return false;
16918 else
16919 *diff = 0;
16921 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16922 return true;
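/* Editorial aside, not part of the original sources: a worked example.
   For "int a[10];", the addresses &a[7] and &a[2] share the core &a
   with constant byte offsets 28 and 8 (4-byte int), so *DIFF is set
   to 20 and the function returns true; for &a[i] versus &a[2] the
   offset 4*i is not constant and it returns false.  */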
16925 /* Simplify the floating point expression EXP when the sign of the
16926 result is not significant. Return NULL_TREE if no simplification
16927 is possible. */
16929 tree
16930 fold_strip_sign_ops (tree exp)
16932 tree arg0, arg1;
16933 location_t loc = EXPR_LOCATION (exp);
16935 switch (TREE_CODE (exp))
16937 case ABS_EXPR:
16938 case NEGATE_EXPR:
16939 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16940 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16942 case MULT_EXPR:
16943 case RDIV_EXPR:
16944 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16945 return NULL_TREE;
16946 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16947 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16948 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16949 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16950 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16951 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16952 break;
16954 case COMPOUND_EXPR:
16955 arg0 = TREE_OPERAND (exp, 0);
16956 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16957 if (arg1)
16958 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16959 break;
16961 case COND_EXPR:
16962 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16963 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16964 if (arg0 || arg1)
16965 return fold_build3_loc (loc,
16966 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16967 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16968 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16969 break;
16971 case CALL_EXPR:
16973 const enum built_in_function fcode = builtin_mathfn_code (exp);
16974 switch (fcode)
16976 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16977 /* Strip the copysign call and return its 1st argument. */
16978 arg0 = CALL_EXPR_ARG (exp, 0);
16979 arg1 = CALL_EXPR_ARG (exp, 1);
16980 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16982 default:
16983 /* Strip sign ops from the argument of "odd" math functions. */
16984 if (negate_mathfn_p (fcode))
16986 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16987 if (arg0)
16988 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16990 break;
16993 break;
16995 default:
16996 break;
16998 return NULL_TREE;
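/* Editorial aside, not part of the original sources: callers such as
   the fabs folder use fold_strip_sign_ops when only the magnitude of
   the result matters, e.g.

     fabs (-x * y)          ->  fabs (x * y)
     fabs (copysign (x, y)) ->  fabs (x)
     fabs (sin (-x))        ->  fabs (sin (x))  -- sin is "odd"

   The fabs context here is the hypothetical caller; this function only
   rewrites its argument.  */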