/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
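
/* Illustrative sketch: a hypothetical caller combining two size
   expressions with the entry points described above.

     tree total = size_binop (MULT_EXPR, nelts, elt_size);
     tree padded = size_binop (PLUS_EXPR, total, size_int (8));

   size_binop folds to a constant when both operands are INTEGER_CSTs
   and otherwise builds the expression in `sizetype'; size_int (8)
   creates the sizetype constant 8.  */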

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
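
/* Illustrative sketch of how the encoding composes: bit 0 stands for
   "less", bit 1 for "equal", bit 2 for "greater" and bit 3 for
   "unordered", so ANDing or ORing two codes yields the code of the
   conjunction or disjunction of the comparisons.  */
#if 0
static void
example_compcode_composition (void)
{
  /* (x <= y) && (x >= y) is x == y: 3 & 6 == 2.  */
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);

  /* (x < y) || (x == y) is x <= y: 1 | 2 == 3.  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
}
#endif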

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
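
/* Illustrative sketch: a hypothetical use of div_if_zero_remainder,
   recovering an element count from a byte offset; it yields the
   quotient only when the division is exact.  */
#if 0
static tree
example_exact_quotient (tree byte_offset, tree elt_size)
{
  /* NULL_TREE if ELT_SIZE does not divide BYTE_OFFSET evenly.  */
  return div_if_zero_remainder (EXACT_DIV_EXPR, byte_offset, elt_size);
}
#endif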

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
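
/* Illustrative sketch of the deferral protocol above: a hypothetical
   caller folds speculatively and only lets the warning out if the
   folded result is actually used.  */
#if 0
static tree
example_speculative_fold (tree expr, gimple stmt)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  /* Issue any deferred -Wstrict-overflow warning only when the fold
     changed the expression and we will therefore keep the result.  */
  fold_undefer_overflow_warnings (folded != expr, stmt, 0);
  return folded;
}
#endif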

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
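
/* Illustrative sketch: for a 32-bit signed type the one value that
   cannot be negated is the minimum, since VAL then equals
   1 << (prec - 1) in the final test above.  */
#if 0
static void
example_negate_check (void)
{
  tree ok = build_int_cst (integer_type_node, -2147483647);
  tree min = TYPE_MIN_VALUE (integer_type_node);

  gcc_assert (may_negate_without_overflow_p (ok));
  gcc_assert (!may_negate_without_overflow_p (min));
}
#endif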

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
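
/* Illustrative sketch: a hypothetical fold exercising the PLUS_EXPR
   case above, which rewrites -(a + 5) as -5 - a instead of wrapping
   the sum in a NEGATE_EXPR.  */
#if 0
static tree
example_negate_plus (location_t loc, tree a)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree sum = fold_build2_loc (loc, PLUS_EXPR, integer_type_node, a, five);

  return fold_negate_expr (loc, sum);
}
#endif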

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
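
/* Illustrative sketch: what split_tree produces for a hypothetical
   input X - 4 decomposed under PLUS_EXPR.  */
#if 0
static void
example_split (tree in /* X - 4 */)
{
  tree con, lit, minus_lit, var;

  var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, /*negate_p=*/0);
  /* var == X, con == NULL_TREE, lit == NULL_TREE, and minus_lit is
     the constant 4, because the literal was subtracted.  */
}
#endif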

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
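
/* Illustrative sketch: constant-folding 6 * 7 with int_const_binop.  */
#if 0
static void
example_int_const_binop (void)
{
  tree six = build_int_cst (integer_type_node, 6);
  tree seven = build_int_cst (integer_type_node, 7);
  tree prod = int_const_binop (MULT_EXPR, six, seven);

  /* PROD is the INTEGER_CST 42; a code the function does not handle
     would have produced NULL_TREE instead.  */
  gcc_assert (TREE_INT_CST_LOW (prod) == 42);
}
#endif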

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
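
/* A worked instance of the wide-range complex division above: for
   (1 + 2i) / (3 + 4i) we have |br| < |bi|, so the TRUE branch gives
   ratio = 3/4, div = 4 + 3*(3/4) = 25/4,
   tr = (1*(3/4) + 2) / (25/4) = 11/25,
   ti = (2*(3/4) - 1) / (25/4) = 2/25,
   which matches (1 + 2i)(3 - 4i)/25 = (11 + 2i)/25.  */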

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
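
/* Illustrative sketch: because sizetype is unsigned, the difference
   comes back in the signed ssizetype, so a "negative" result stays
   meaningful.  */
#if 0
static void
example_size_diffop (void)
{
  tree d = size_diffop_loc (UNKNOWN_LOCATION, size_int (4), size_int (12));

  /* D is the ssizetype constant -8, not a huge unsigned value.  */
  gcc_assert (tree_int_cst_equal (d, build_int_cst (ssizetype, -8)));
}
#endif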

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
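
/* Under the saturating rules above, converting the REAL_CST 1e30 to a
   32-bit signed type yields TYPE_MAX_VALUE with TREE_OVERFLOW set, and
   converting a NaN yields zero, also with TREE_OVERFLOW set.  */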

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0;
     we do so by adding 1 to TEMP when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
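/* Illustrative sketch, not part of fold-const.c proper: a minimal
   example of calling fold_convert_loc from middle-end code. The
   wrapper name is hypothetical; the behavior described is the
   INTEGER_CST path of the REAL_TYPE case above. */
#if 0
static tree
example_int_to_double (location_t loc)
{
  tree forty_two = build_int_cst (integer_type_node, 42);
  /* Folds directly to the REAL_CST 42.0 via fold_convert_const,
     rather than emitting a FLOAT_EXPR. */
  return fold_convert_loc (loc, double_type_node, forty_two);
}
#endif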
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
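/* Illustrative example, not part of the original file: inverting
   x < y. For integral operands (honor_nans false) the inverse is
   x >= y. When NaNs are honored it must be x unge y, since both
   "x < y" and "x >= y" are false on a NaN; and if -ftrapping-math
   is also in effect, invert_tree_comparison returns ERROR_MARK,
   because LT traps on unordered operands while UNGE does not. */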
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
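/* Worked example, added for illustration: the bit encoding makes
   conjunction and disjunction of predicates on the same operands a
   plain AND/OR of their codes, e.g.

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
     COMPCODE_UNORD | COMPCODE_EQ == 8 | 2 == 10 == COMPCODE_UNEQ

   which is exactly what combine_comparisons below relies on. */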
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
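/* Illustrative sketch, not part of the original file: combining two
   comparisons of the same integral operands. The variables X and Y
   here are hypothetical. */
#if 0
  tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                                boolean_type_node, x, y);
  /* lcompcode | rcompcode == COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
     so T is the tree "x <= y". Conversely (x < y) && (x > y) yields
     COMPCODE_FALSE, i.e. constant false. NULL_TREE is returned when
     the rewrite would change trapping behavior. */
#endif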
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address space equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal. */
2546 if (TREE_SIDE_EFFECTS (arg0)
2547 || TREE_SIDE_EFFECTS (arg1))
2548 return 0;
2550 switch (TREE_CODE (arg0))
2552 case INDIRECT_REF:
2553 case REALPART_EXPR:
2554 case IMAGPART_EXPR:
2555 return OP_SAME (0);
2557 case TARGET_MEM_REF:
2558 /* Require equal extra operands and then fall through to MEM_REF
2559 handling of the two common operands. */
2560 if (!OP_SAME_WITH_NULL (2)
2561 || !OP_SAME_WITH_NULL (3)
2562 || !OP_SAME_WITH_NULL (4))
2563 return 0;
2564 /* Fallthru. */
2565 case MEM_REF:
2566 /* Require equal access sizes, and similar pointer types.
2567 We can have incomplete types for array references of
2568 variable-sized arrays from the Fortran frontend
2569 though. */
2570 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2571 || (TYPE_SIZE (TREE_TYPE (arg0))
2572 && TYPE_SIZE (TREE_TYPE (arg1))
2573 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2574 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2575 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2576 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2577 && OP_SAME (0) && OP_SAME (1));
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 /* Operands 2 and 3 may be null.
2582 Compare the array index by value if it is constant first as we
2583 may have different types but same value here. */
2584 return (OP_SAME (0)
2585 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2586 TREE_OPERAND (arg1, 1))
2587 || OP_SAME (1))
2588 && OP_SAME_WITH_NULL (2)
2589 && OP_SAME_WITH_NULL (3));
2591 case COMPONENT_REF:
2592 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2593 may be NULL when we're called to compare MEM_EXPRs. */
2594 return OP_SAME_WITH_NULL (0)
2595 && OP_SAME (1)
2596 && OP_SAME_WITH_NULL (2);
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2601 default:
2602 return 0;
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2616 case FMA_EXPR:
2617 case WIDEN_MULT_PLUS_EXPR:
2618 case WIDEN_MULT_MINUS_EXPR:
2619 if (!OP_SAME (2))
2620 return 0;
2621 /* The multiplication operands are commutative. */
2622 /* FALLTHRU */
2624 case TRUTH_AND_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_XOR_EXPR:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* Otherwise take into account this is a commutative operation. */
2631 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case COND_EXPR:
2637 case VEC_COND_EXPR:
2638 case DOT_PROD_EXPR:
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 default:
2642 return 0;
2645 case tcc_vl_exp:
2646 switch (TREE_CODE (arg0))
2648 case CALL_EXPR:
2649 /* If the CALL_EXPRs call different functions, then they
2650 clearly cannot be equal. */
2651 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2652 flags))
2653 return 0;
2656 unsigned int cef = call_expr_flags (arg0);
2657 if (flags & OEP_PURE_SAME)
2658 cef &= ECF_CONST | ECF_PURE;
2659 else
2660 cef &= ECF_CONST;
2661 if (!cef)
2662 return 0;
2665 /* Now see if all the arguments are the same. */
2667 const_call_expr_arg_iterator iter0, iter1;
2668 const_tree a0, a1;
2669 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2670 a1 = first_const_call_expr_arg (arg1, &iter1);
2671 a0 && a1;
2672 a0 = next_const_call_expr_arg (&iter0),
2673 a1 = next_const_call_expr_arg (&iter1))
2674 if (! operand_equal_p (a0, a1, flags))
2675 return 0;
2677 /* If we get here and both argument lists are exhausted
2678 then the CALL_EXPRs are equal. */
2679 return ! (a0 || a1);
2681 default:
2682 return 0;
2685 case tcc_declaration:
2686 /* Consider __builtin_sqrt equal to sqrt. */
2687 return (TREE_CODE (arg0) == FUNCTION_DECL
2688 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2689 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2690 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2692 default:
2693 return 0;
2696 #undef OP_SAME
2697 #undef OP_SAME_WITH_NULL
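/* Illustrative sketch, not part of the original file: operand_equal_p
   recognizes commutative reorderings of side-effect-free trees. A
   and B are assumed to be side-effect-free decls or SSA names. */
#if 0
  tree a_plus_b = build2 (PLUS_EXPR, integer_type_node, a, b);
  tree b_plus_a = build2 (PLUS_EXPR, integer_type_node, b, a);
  gcc_assert (operand_equal_p (a_plus_b, b_plus_a, 0));
  /* With OEP_ONLY_CONST the same call returns 0, since neither
     operand is a constant. */
#endif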
2700 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2701 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2703 When in doubt, return 0. */
2705 static int
2706 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2708 int unsignedp1, unsignedpo;
2709 tree primarg0, primarg1, primother;
2710 unsigned int correct_width;
2712 if (operand_equal_p (arg0, arg1, 0))
2713 return 1;
2715 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2716 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2717 return 0;
2719 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2720 and see if the inner values are the same. This removes any
2721 signedness comparison, which doesn't matter here. */
2722 primarg0 = arg0, primarg1 = arg1;
2723 STRIP_NOPS (primarg0);
2724 STRIP_NOPS (primarg1);
2725 if (operand_equal_p (primarg0, primarg1, 0))
2726 return 1;
2728 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2729 actual comparison operand, ARG0.
2731 First throw away any conversions to wider types
2732 already present in the operands. */
2734 primarg1 = get_narrower (arg1, &unsignedp1);
2735 primother = get_narrower (other, &unsignedpo);
2737 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2738 if (unsignedp1 == unsignedpo
2739 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2740 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2742 tree type = TREE_TYPE (arg0);
2744 /* Make sure shorter operand is extended the right way
2745 to match the longer operand. */
2746 primarg1 = fold_convert (signed_or_unsigned_type_for
2747 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2749 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2750 return 1;
2753 return 0;
2756 /* See if ARG is an expression that is either a comparison or is performing
2757 arithmetic on comparisons. The comparisons must only be comparing
2758 two different values, which will be stored in *CVAL1 and *CVAL2; if
2759 they are nonzero it means that some operands have already been found.
2760 No variables may be used anywhere else in the expression except in the
2761 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2762 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2764 If this is true, return 1. Otherwise, return zero. */
2766 static int
2767 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2774 tclass = tcc_unary;
2775 else if (tclass == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2777 || code == COMPOUND_EXPR))
2778 tclass = tcc_binary;
2780 else if (tclass == tcc_expression && code == SAVE_EXPR
2781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2783 /* If we've already found a CVAL1 or CVAL2, this expression is
2784 too complex to handle. */
2785 if (*cval1 || *cval2)
2786 return 0;
2788 tclass = tcc_unary;
2789 *save_p = 1;
2792 switch (tclass)
2794 case tcc_unary:
2795 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2797 case tcc_binary:
2798 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2799 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2800 cval1, cval2, save_p));
2802 case tcc_constant:
2803 return 1;
2805 case tcc_expression:
2806 if (code == COND_EXPR)
2807 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2808 cval1, cval2, save_p)
2809 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2812 cval1, cval2, save_p));
2813 return 0;
2815 case tcc_comparison:
2816 /* First see if we can handle the first operand, then the second. For
2817 the second operand, we know *CVAL1 can't be zero. It must be that
2818 one side of the comparison is each of the values; test for the
2819 case where this isn't true by failing if the two operands
2820 are the same. */
2822 if (operand_equal_p (TREE_OPERAND (arg, 0),
2823 TREE_OPERAND (arg, 1), 0))
2824 return 0;
2826 if (*cval1 == 0)
2827 *cval1 = TREE_OPERAND (arg, 0);
2828 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2829 ;
2830 else if (*cval2 == 0)
2831 *cval2 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2833 ;
2834 else
2835 return 0;
2837 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2838 ;
2839 else if (*cval2 == 0)
2840 *cval2 = TREE_OPERAND (arg, 1);
2841 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2842 ;
2843 else
2844 return 0;
2846 return 1;
2848 default:
2849 return 0;
2853 /* ARG is a tree that is known to contain just arithmetic operations and
2854 comparisons. Evaluate the operations in the tree substituting NEW0 for
2855 any occurrence of OLD0 as an operand of a comparison and likewise for
2856 NEW1 and OLD1. */
2858 static tree
2859 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2860 tree old1, tree new1)
2862 tree type = TREE_TYPE (arg);
2863 enum tree_code code = TREE_CODE (arg);
2864 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2866 /* We can handle some of the tcc_expression cases here. */
2867 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2868 tclass = tcc_unary;
2869 else if (tclass == tcc_expression
2870 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2871 tclass = tcc_binary;
2873 switch (tclass)
2875 case tcc_unary:
2876 return fold_build1_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1));
2880 case tcc_binary:
2881 return fold_build2_loc (loc, code, type,
2882 eval_subst (loc, TREE_OPERAND (arg, 0),
2883 old0, new0, old1, new1),
2884 eval_subst (loc, TREE_OPERAND (arg, 1),
2885 old0, new0, old1, new1));
2887 case tcc_expression:
2888 switch (code)
2890 case SAVE_EXPR:
2891 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2892 old1, new1);
2894 case COMPOUND_EXPR:
2895 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2896 old1, new1);
2898 case COND_EXPR:
2899 return fold_build3_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 2),
2905 old0, new0, old1, new1));
2906 default:
2907 break;
2909 /* Fall through - ??? */
2911 case tcc_comparison:
2913 tree arg0 = TREE_OPERAND (arg, 0);
2914 tree arg1 = TREE_OPERAND (arg, 1);
2916 /* We need to check both for exact equality and tree equality. The
2917 former will be true if the operand has a side-effect. In that
2918 case, we know the operand occurred exactly once. */
2920 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2921 arg0 = new0;
2922 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2923 arg0 = new1;
2925 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2926 arg1 = new0;
2927 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2928 arg1 = new1;
2930 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 default:
2934 return arg;
2938 /* Return a tree for the case when the result of an expression is RESULT
2939 converted to TYPE and OMITTED was previously an operand of the expression
2940 but is now not needed (e.g., we folded OMITTED * 0).
2942 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2943 the conversion of RESULT to TYPE. */
2945 tree
2946 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2948 tree t = fold_convert_loc (loc, type, result);
2950 /* If the resulting operand is an empty statement, just return the omitted
2951 statement cast to void. */
2952 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2953 return build1_loc (loc, NOP_EXPR, void_type_node,
2954 fold_ignored_result (omitted));
2956 if (TREE_SIDE_EFFECTS (omitted))
2957 return build2_loc (loc, COMPOUND_EXPR, type,
2958 fold_ignored_result (omitted), t);
2960 return non_lvalue_loc (loc, t);
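/* Illustrative example, not part of the original file: when folding
   "f () * 0" the product is 0, but the call must still be evaluated
   for its side effects, so omit_one_operand_loc yields the
   COMPOUND_EXPR "(f (), 0)". For a side-effect-free OMITTED it
   simply returns RESULT converted to TYPE. */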
2963 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2965 static tree
2966 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2967 tree omitted)
2969 tree t = fold_convert_loc (loc, type, result);
2971 /* If the resulting operand is an empty statement, just return the omitted
2972 statement cast to void. */
2973 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2974 return build1_loc (loc, NOP_EXPR, void_type_node,
2975 fold_ignored_result (omitted));
2977 if (TREE_SIDE_EFFECTS (omitted))
2978 return build2_loc (loc, COMPOUND_EXPR, type,
2979 fold_ignored_result (omitted), t);
2981 return pedantic_non_lvalue_loc (loc, t);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2986 of the expression but are now not needed.
2988 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2989 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2990 evaluated before OMITTED2. Otherwise, if neither has side effects,
2991 just do the conversion of RESULT to TYPE. */
2993 tree
2994 omit_two_operands_loc (location_t loc, tree type, tree result,
2995 tree omitted1, tree omitted2)
2997 tree t = fold_convert_loc (loc, type, result);
2999 if (TREE_SIDE_EFFECTS (omitted2))
3000 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3001 if (TREE_SIDE_EFFECTS (omitted1))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3004 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3008 /* Return a simplified tree node for the truth-negation of ARG. This
3009 never alters ARG itself. We assume that ARG is an operation that
3010 returns a truth value (0 or 1).
3012 FIXME: one would think we would fold the result, but it causes
3013 problems with the dominator optimizer. */
3015 tree
3016 fold_truth_not_expr (location_t loc, tree arg)
3018 tree type = TREE_TYPE (arg);
3019 enum tree_code code = TREE_CODE (arg);
3020 location_t loc1, loc2;
3022 /* If this is a comparison, we can simply invert it, except for
3023 floating-point non-equality comparisons, in which case we just
3024 enclose a TRUTH_NOT_EXPR around what we have. */
3026 if (TREE_CODE_CLASS (code) == tcc_comparison)
3028 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3029 if (FLOAT_TYPE_P (op_type)
3030 && flag_trapping_math
3031 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3032 && code != NE_EXPR && code != EQ_EXPR)
3033 return NULL_TREE;
3035 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3036 if (code == ERROR_MARK)
3037 return NULL_TREE;
3039 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3040 TREE_OPERAND (arg, 1));
3043 switch (code)
3045 case INTEGER_CST:
3046 return constant_boolean_node (integer_zerop (arg), type);
3048 case TRUTH_AND_EXPR:
3049 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3050 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3051 return build2_loc (loc, TRUTH_OR_EXPR, type,
3052 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3053 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3055 case TRUTH_OR_EXPR:
3056 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3057 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3058 return build2_loc (loc, TRUTH_AND_EXPR, type,
3059 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3060 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062 case TRUTH_XOR_EXPR:
3063 /* Here we can invert either operand. We invert the first operand
3064 unless the second operand is a TRUTH_NOT_EXPR in which case our
3065 result is the XOR of the first operand with the inside of the
3066 negation of the second operand. */
3068 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3069 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3070 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3071 else
3072 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3073 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3074 TREE_OPERAND (arg, 1));
3076 case TRUTH_ANDIF_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083 case TRUTH_ORIF_EXPR:
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_NOT_EXPR:
3091 return TREE_OPERAND (arg, 0);
3093 case COND_EXPR:
3095 tree arg1 = TREE_OPERAND (arg, 1);
3096 tree arg2 = TREE_OPERAND (arg, 2);
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3101 /* A COND_EXPR may have a throw as one operand, which
3102 then has void type. Just leave void operands
3103 as they are. */
3104 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3105 VOID_TYPE_P (TREE_TYPE (arg1))
3106 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3107 VOID_TYPE_P (TREE_TYPE (arg2))
3108 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 case COMPOUND_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 return build2_loc (loc, COMPOUND_EXPR, type,
3114 TREE_OPERAND (arg, 0),
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3117 case NON_LVALUE_EXPR:
3118 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3121 CASE_CONVERT:
3122 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3123 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3125 /* ... fall through ... */
3127 case FLOAT_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 return build1_loc (loc, TREE_CODE (arg), type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3132 case BIT_AND_EXPR:
3133 if (!integer_onep (TREE_OPERAND (arg, 1)))
3134 return NULL_TREE;
3135 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3137 case SAVE_EXPR:
3138 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3140 case CLEANUP_POINT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 default:
3146 return NULL_TREE;
3150 /* Return a simplified tree node for the truth-negation of ARG. This
3151 never alters ARG itself. We assume that ARG is an operation that
3152 returns a truth value (0 or 1).
3154 FIXME: one would think we would fold the result, but it causes
3155 problems with the dominator optimizer. */
3157 tree
3158 invert_truthvalue_loc (location_t loc, tree arg)
3160 tree tem;
3162 if (TREE_CODE (arg) == ERROR_MARK)
3163 return arg;
3165 tem = fold_truth_not_expr (loc, arg);
3166 if (!tem)
3167 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3169 return tem;
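/* Illustrative example, not part of the original file:
   fold_truth_not_expr applies De Morgan's laws and comparison
   inversion, e.g.

     ! (a && b)  -->  !a || !b
     ! (x < y)   -->  x >= y     (integral operands)
     ! (x < y)   -->  x unge y   (floats with NaNs, no trapping math)

   and invert_truthvalue_loc falls back to wrapping a TRUTH_NOT_EXPR
   when no simplification is found. */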
3172 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3173 operands are another bit-wise operation with a common input. If so,
3174 distribute the bit operations to save an operation and possibly two if
3175 constants are involved. For example, convert
3176 (A | B) & (A | C) into A | (B & C)
3177 Further simplification will occur if B and C are constants.
3179 If this optimization cannot be done, 0 will be returned. */
3181 static tree
3182 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3183 tree arg0, tree arg1)
3185 tree common;
3186 tree left, right;
3188 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3189 || TREE_CODE (arg0) == code
3190 || (TREE_CODE (arg0) != BIT_AND_EXPR
3191 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3192 return 0;
3194 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3196 common = TREE_OPERAND (arg0, 0);
3197 left = TREE_OPERAND (arg0, 1);
3198 right = TREE_OPERAND (arg1, 1);
3200 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3202 common = TREE_OPERAND (arg0, 0);
3203 left = TREE_OPERAND (arg0, 1);
3204 right = TREE_OPERAND (arg1, 0);
3206 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3208 common = TREE_OPERAND (arg0, 1);
3209 left = TREE_OPERAND (arg0, 0);
3210 right = TREE_OPERAND (arg1, 1);
3212 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3214 common = TREE_OPERAND (arg0, 1);
3215 left = TREE_OPERAND (arg0, 0);
3216 right = TREE_OPERAND (arg1, 0);
3218 else
3219 return 0;
3221 common = fold_convert_loc (loc, type, common);
3222 left = fold_convert_loc (loc, type, left);
3223 right = fold_convert_loc (loc, type, right);
3224 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3225 fold_build2_loc (loc, code, type, left, right));
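/* Worked example, added for illustration: with CODE == BIT_AND_EXPR
   and arguments (a | 0xf0) and (a | 0x0f), the common operand is A,
   so the result is a | (0xf0 & 0x0f); folding the inner BIT_AND_EXPR
   gives a | 0 and finally just A (the absorption law). */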
3228 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3229 with code CODE. This optimization is unsafe. */
3230 static tree
3231 distribute_real_division (location_t loc, enum tree_code code, tree type,
3232 tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3238 if (mul0 == mul1
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2_loc (loc, code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3256 if (!mul0)
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3258 if (!mul1)
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2_loc (loc, MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
3266 return NULL_TREE;
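/* Illustrative example, not part of the original file:
   "a / 5.0 + b / 5.0" becomes "(a + b) / 5.0", and
   "a / 2.0 - a / 4.0" becomes "a * 0.25" (1/2 - 1/4 evaluated in
   REAL_VALUE_TYPE arithmetic). Being unsafe, the transformation is
   expected to be applied only when unsafe math optimizations are
   enabled, since it can change rounding and overflow behavior. */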
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3272 static tree
3273 make_bit_field_ref (location_t loc, tree inner, tree type,
3274 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3276 tree result, bftype;
3278 if (bitpos == 0)
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert_loc (loc, type, inner);
3288 bftype = type;
3289 if (TYPE_PRECISION (bftype) != bitsize
3290 || TYPE_UNSIGNED (bftype) == !unsignedp)
3291 bftype = build_nonstandard_integer_type (bitsize, 0);
3293 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3294 size_int (bitsize), bitsize_int (bitpos));
3296 if (bftype != type)
3297 result = fold_convert_loc (loc, type, result);
3299 return result;
3302 /* Optimize a bit-field compare.
3304 There are two cases: First is a compare against a constant and the
3305 second is a comparison of two items where the fields are at the same
3306 bit position relative to the start of a chunk (byte, halfword, word)
3307 large enough to contain it. In these cases we can avoid the shift
3308 implicit in bitfield extractions.
3310 For constants, we emit a compare of the shifted constant with the
3311 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3312 compared. For two fields at the same position, we do the ANDs with the
3313 similar mask and compare the result of the ANDs.
3315 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3316 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3317 are the left and right operands of the comparison, respectively.
3319 If the optimization described above can be done, we return the resulting
3320 tree. Otherwise we return zero. */
3322 static tree
3323 optimize_bit_field_compare (location_t loc, enum tree_code code,
3324 tree compare_type, tree lhs, tree rhs)
3326 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3327 tree type = TREE_TYPE (lhs);
3328 tree signed_type, unsigned_type;
3329 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3330 enum machine_mode lmode, rmode, nmode;
3331 int lunsignedp, runsignedp;
3332 int lvolatilep = 0, rvolatilep = 0;
3333 tree linner, rinner = NULL_TREE;
3334 tree mask;
3335 tree offset;
3337 /* In the strict volatile bitfields case, doing code changes here may prevent
3338 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3339 if (flag_strict_volatile_bitfields > 0)
3340 return 0;
3342 /* Get all the information about the extractions being done. If the bit size
3343 is the same as the size of the underlying object, we aren't doing an
3344 extraction at all and so can do nothing. We also don't want to
3345 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3346 then will no longer be able to replace it. */
3347 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3348 &lunsignedp, &lvolatilep, false);
3349 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3350 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3351 return 0;
3353 if (!const_p)
3355 /* If this is not a constant, we can only do something if bit positions,
3356 sizes, and signedness are the same. */
3357 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3358 &runsignedp, &rvolatilep, false);
3360 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3361 || lunsignedp != runsignedp || offset != 0
3362 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3363 return 0;
3366 /* See if we can find a mode to refer to this field. We should be able to,
3367 but fail if we can't. */
3368 if (lvolatilep
3369 && GET_MODE_BITSIZE (lmode) > 0
3370 && flag_strict_volatile_bitfields > 0)
3371 nmode = lmode;
3372 else
3373 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3374 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3375 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3376 TYPE_ALIGN (TREE_TYPE (rinner))),
3377 word_mode, lvolatilep || rvolatilep);
3378 if (nmode == VOIDmode)
3379 return 0;
3381 /* Set signed and unsigned types of the precision of this mode for the
3382 shifts below. */
3383 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3384 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3386 /* Compute the bit position and size for the new reference and our offset
3387 within it. If the new reference is the same size as the original, we
3388 won't optimize anything, so return zero. */
3389 nbitsize = GET_MODE_BITSIZE (nmode);
3390 nbitpos = lbitpos & ~ (nbitsize - 1);
3391 lbitpos -= nbitpos;
3392 if (nbitsize == lbitsize)
3393 return 0;
3395 if (BYTES_BIG_ENDIAN)
3396 lbitpos = nbitsize - lbitsize - lbitpos;
3398 /* Make the mask to be used against the extracted field. */
3399 mask = build_int_cst_type (unsigned_type, -1);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos));
3404 if (! const_p)
3405 /* If not comparing with constant, just rework the comparison
3406 and return. */
3407 return fold_build2_loc (loc, code, compare_type,
3408 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (loc, linner,
3410 unsigned_type,
3411 nbitsize, nbitpos,
3412 1),
3413 mask),
3414 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (loc, rinner,
3416 unsigned_type,
3417 nbitsize, nbitpos,
3418 1),
3419 mask));
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3428 the sign bit. */
3430 if (lunsignedp)
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert_loc (loc,
3434 unsigned_type, rhs),
3435 size_int (lbitsize))))
3437 warning (0, "comparison is always %d due to width of bit-field",
3438 code == NE_EXPR);
3439 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 else
3444 tree tem = const_binop (RSHIFT_EXPR,
3445 fold_convert_loc (loc, signed_type, rhs),
3446 size_int (lbitsize - 1));
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3450 code == NE_EXPR);
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3466 if (lvolatilep)
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert_loc (loc, unsigned_type, rhs),
3475 size_int (lbitpos)),
3476 mask);
3478 lhs = build2_loc (loc, code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3480 return lhs;
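/* Illustrative example, not part of the original file: given

     struct s { unsigned a : 3; unsigned b : 5; } x;
     ... x.b == 7 ...

   field B occupies bits 3..7 of a byte-sized chunk on a typical
   little-endian target, so the comparison becomes roughly

     (*(unsigned char *) &x & 0xf8) == (7 << 3)

   i.e. a BIT_AND_EXPR of the containing unit with the mask, compared
   against the constant shifted into place; the shift implicit in a
   bitfield extraction is avoided. The exact layout depends on the
   target's endianness and modes. */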
3483 /* Subroutine for fold_truth_andor_1: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3506 static tree
3507 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3513 tree and_mask = 0;
3514 tree mask, inner, offset;
3515 tree unsigned_type;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 return 0;
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (CONVERT_EXPR_P (exp)
3528 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3529 outer_type = TREE_TYPE (exp);
3530 STRIP_NOPS (exp);
3532 if (TREE_CODE (exp) == BIT_AND_EXPR)
3534 and_mask = TREE_OPERAND (exp, 1);
3535 exp = TREE_OPERAND (exp, 0);
3536 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3537 if (TREE_CODE (and_mask) != INTEGER_CST)
3538 return 0;
3541 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3542 punsignedp, pvolatilep, false);
3543 if ((inner == exp && and_mask == 0)
3544 || *pbitsize < 0 || offset != 0
3545 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3546 return 0;
3548 /* If the number of bits in the reference is the same as the bitsize of
3549 the outer type, then the outer type gives the signedness. Otherwise
3550 (in case of a small bitfield) the signedness is unchanged. */
3551 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3552 *punsignedp = TYPE_UNSIGNED (outer_type);
3554 /* Compute the mask to access the bitfield. */
3555 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3556 precision = TYPE_PRECISION (unsigned_type);
3558 mask = build_int_cst_type (unsigned_type, -1);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3564 if (and_mask != 0)
3565 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3566 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3568 *pmask = mask;
3569 *pand_mask = and_mask;
3570 return inner;
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3574 bit positions. */
3576 static int
3577 all_ones_mask_p (const_tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3581 tree tmask;
3583 tmask = build_int_cst_type (signed_type_for (type), -1);
3585 return
3586 tree_int_cst_equal (mask,
3587 const_binop (RSHIFT_EXPR,
3588 const_binop (LSHIFT_EXPR, tmask,
3589 size_int (precision - size)),
3590 size_int (precision - size)));
3593 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3594 represents the sign bit of EXP's type. If EXP represents a sign
3595 or zero extension, also test VAL against the unextended type.
3596 The return value is the (sub)expression whose sign bit is VAL,
3597 or NULL_TREE otherwise. */
3599 static tree
3600 sign_bit_p (tree exp, const_tree val)
3602 unsigned HOST_WIDE_INT mask_lo, lo;
3603 HOST_WIDE_INT mask_hi, hi;
3604 int width;
3605 tree t;
3607 /* Tree EXP must have an integral type. */
3608 t = TREE_TYPE (exp);
3609 if (! INTEGRAL_TYPE_P (t))
3610 return NULL_TREE;
3612 /* Tree VAL must be an integer constant. */
3613 if (TREE_CODE (val) != INTEGER_CST
3614 || TREE_OVERFLOW (val))
3615 return NULL_TREE;
3617 width = TYPE_PRECISION (t);
3618 if (width > HOST_BITS_PER_WIDE_INT)
3620 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3621 lo = 0;
3623 mask_hi = ((unsigned HOST_WIDE_INT) -1
3624 >> (HOST_BITS_PER_DOUBLE_INT - width));
3625 mask_lo = -1;
3627 else
3629 hi = 0;
3630 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3632 mask_hi = 0;
3633 mask_lo = ((unsigned HOST_WIDE_INT) -1
3634 >> (HOST_BITS_PER_WIDE_INT - width));
3637 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3638 treat VAL as if it were unsigned. */
3639 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3640 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3641 return exp;
3643 /* Handle extension from a narrower type. */
3644 if (TREE_CODE (exp) == NOP_EXPR
3645 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3646 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3648 return NULL_TREE;
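/* Illustrative example, not part of the original file: for a 32-bit
   int X, an INTEGER_CST with value 0x80000000 is the sign bit, so
   sign_bit_p returns X itself. For an extension such as
   (int) (short) Y the function recurses into the narrower operand
   and also accepts 0x8000, the sign bit of short. */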
3651 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3652 to be evaluated unconditionally. */
3654 static int
3655 simple_operand_p (const_tree exp)
3657 /* Strip any conversions that don't change the machine mode. */
3658 STRIP_NOPS (exp);
3660 return (CONSTANT_CLASS_P (exp)
3661 || TREE_CODE (exp) == SSA_NAME
3662 || (DECL_P (exp)
3663 && ! TREE_ADDRESSABLE (exp)
3664 && ! TREE_THIS_VOLATILE (exp)
3665 && ! DECL_NONLOCAL (exp)
3666 /* Don't regard global variables as simple. They may be
3667 allocated in ways unknown to the compiler (shared memory,
3668 #pragma weak, etc). */
3669 && ! TREE_PUBLIC (exp)
3670 && ! DECL_EXTERNAL (exp)
3671 /* Loading a static variable is unduly expensive, but global
3672 registers aren't expensive. */
3673 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3677 to be evaluated unconditionally.
3678 In addition to simple_operand_p, we assume that comparisons, conversions,
3679 and logic-not operations are simple, if their operands are simple, too. */
3681 static bool
3682 simple_operand_p_2 (tree exp)
3684 enum tree_code code;
3686 if (TREE_SIDE_EFFECTS (exp)
3687 || tree_could_trap_p (exp))
3688 return false;
3690 while (CONVERT_EXPR_P (exp))
3691 exp = TREE_OPERAND (exp, 0);
3693 code = TREE_CODE (exp);
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3696 return (simple_operand_p (TREE_OPERAND (exp, 0))
3697 && simple_operand_p (TREE_OPERAND (exp, 1)));
3699 if (code == TRUTH_NOT_EXPR)
3700 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3702 return simple_operand_p (exp);
3706 /* The following functions are subroutines to fold_range_test and allow it to
3707 try to change a logical combination of comparisons into a range test.
3709 For example, both
3710 X == 2 || X == 3 || X == 4 || X == 5
3711 and
3712 X >= 2 && X <= 5
3713 are converted to
3714 (unsigned) (X - 2) <= 3
3716 We describe each set of comparisons as being either inside or outside
3717 a range, using a variable named like IN_P, and then describe the
3718 range with a lower and upper bound. If one of the bounds is omitted,
3719 it represents either the highest or lowest value of the type.
3721 In the comments below, we represent a range by two numbers in brackets
3722 preceded by a "+" to designate being inside that range, or a "-" to
3723 designate being outside that range, so the condition can be inverted by
3724 flipping the prefix. An omitted bound is represented by a "-". For
3725 example, "- [-, 10]" means being outside the range starting at the lowest
3726 possible value and ending at 10, in other words, being greater than 10.
3727 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3728 always false.
3730 We set up things so that the missing bounds are handled in a consistent
3731 manner so neither a missing bound nor "true" and "false" need to be
3732 handled using a special case. */
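/* A minimal sketch (not from this file) of the trick described above,
   assuming a 32-bit unsigned int:  */
#if 0
static int
range_trick_demo (int x)
{
  int chain  = (x == 2 || x == 3 || x == 4 || x == 5);
  /* Subtract in unsigned arithmetic so values below 2 wrap around to
     something larger than 3; one comparison then covers the range.  */
  int folded = ((unsigned int) x - 2u <= 3u);
  return chain == folded;               /* 1 for every x */
}
#endif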
3734 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3735 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3736 and UPPER1_P are nonzero if the respective argument is an upper bound
3737 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3738 must be specified for a comparison. ARG1 will be converted to ARG0's
3739 type if both are specified. */
3741 static tree
3742 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3743 tree arg1, int upper1_p)
3745 tree tem;
3746 int result;
3747 int sgn0, sgn1;
3749 /* If neither arg represents infinity, do the normal operation.
3750 Else, if not a comparison, return infinity. Else handle the special
3751 comparison rules. Note that most of the cases below won't occur, but
3752 are handled for consistency. */
3754 if (arg0 != 0 && arg1 != 0)
3756 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3757 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3758 STRIP_NOPS (tem);
3759 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 if (TREE_CODE_CLASS (code) != tcc_comparison)
3763 return 0;
3765 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3766 for neither. In real mathematics, we cannot assume open-ended ranges
3767 are the same. But this is computer arithmetic, where numbers are finite.
3768 We can therefore represent a missing bound by a value Z greater in
3769 magnitude than any representable number. This permits
3770 us to treat unbounded ranges as equal. */
3771 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3772 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3773 switch (code)
3775 case EQ_EXPR:
3776 result = sgn0 == sgn1;
3777 break;
3778 case NE_EXPR:
3779 result = sgn0 != sgn1;
3780 break;
3781 case LT_EXPR:
3782 result = sgn0 < sgn1;
3783 break;
3784 case LE_EXPR:
3785 result = sgn0 <= sgn1;
3786 break;
3787 case GT_EXPR:
3788 result = sgn0 > sgn1;
3789 break;
3790 case GE_EXPR:
3791 result = sgn0 >= sgn1;
3792 break;
3793 default:
3794 gcc_unreachable ();
3797 return constant_boolean_node (result, type);
3800 /* Helper routine for make_range. Perform one step for it, return
3801 new expression if the loop should continue or NULL_TREE if it should
3802 stop. */
3804 tree
3805 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3806 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3807 bool *strict_overflow_p)
3809 tree arg0_type = TREE_TYPE (arg0);
3810 tree n_low, n_high, low = *p_low, high = *p_high;
3811 int in_p = *p_in_p, n_in_p;
3813 switch (code)
3815 case TRUTH_NOT_EXPR:
3816 /* We can only do something if the range is testing for zero. */
3817 if (low == NULL_TREE || high == NULL_TREE
3818 || ! integer_zerop (low) || ! integer_zerop (high))
3819 return NULL_TREE;
3820 *p_in_p = ! in_p;
3821 return arg0;
3823 case EQ_EXPR: case NE_EXPR:
3824 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3825 /* We can only do something if the range is testing for zero
3826 and if the second operand is an integer constant. Note that
3827 saying something is "in" the range we make is done by
3828 complementing IN_P since it will be set in the initial case of
3829 being not equal to zero; "out" is leaving it alone. */
3830 if (low == NULL_TREE || high == NULL_TREE
3831 || ! integer_zerop (low) || ! integer_zerop (high)
3832 || TREE_CODE (arg1) != INTEGER_CST)
3833 return NULL_TREE;
3835 switch (code)
3837 case NE_EXPR: /* - [c, c] */
3838 low = high = arg1;
3839 break;
3840 case EQ_EXPR: /* + [c, c] */
3841 in_p = ! in_p, low = high = arg1;
3842 break;
3843 case GT_EXPR: /* - [-, c] */
3844 low = 0, high = arg1;
3845 break;
3846 case GE_EXPR: /* + [c, -] */
3847 in_p = ! in_p, low = arg1, high = 0;
3848 break;
3849 case LT_EXPR: /* - [c, -] */
3850 low = arg1, high = 0;
3851 break;
3852 case LE_EXPR: /* + [-, c] */
3853 in_p = ! in_p, low = 0, high = arg1;
3854 break;
3855 default:
3856 gcc_unreachable ();
3859 /* If this is an unsigned comparison, we also know that EXP is
3860 greater than or equal to zero. We base the range tests we make
3861 on that fact, so we record it here so we can parse existing
3862 range tests. We test arg0_type since often the return type
3863 of, e.g. EQ_EXPR, is boolean. */
3864 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3866 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3867 in_p, low, high, 1,
3868 build_int_cst (arg0_type, 0),
3869 NULL_TREE))
3870 return NULL_TREE;
3872 in_p = n_in_p, low = n_low, high = n_high;
3874 /* If the high bound is missing, but we have a nonzero low
3875 bound, reverse the range so it goes from zero to the low bound
3876 minus 1. */
3877 if (high == 0 && low && ! integer_zerop (low))
3879 in_p = ! in_p;
3880 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3881 integer_one_node, 0);
3882 low = build_int_cst (arg0_type, 0);
3886 *p_low = low;
3887 *p_high = high;
3888 *p_in_p = in_p;
3889 return arg0;
3891 case NEGATE_EXPR:
3892 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3893 low and high are non-NULL, then normalize will DTRT. */
3894 if (!TYPE_UNSIGNED (arg0_type)
3895 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3897 if (low == NULL_TREE)
3898 low = TYPE_MIN_VALUE (arg0_type);
3899 if (high == NULL_TREE)
3900 high = TYPE_MAX_VALUE (arg0_type);
3903 /* (-x) IN [a,b] -> x in [-b, -a] */
3904 n_low = range_binop (MINUS_EXPR, exp_type,
3905 build_int_cst (exp_type, 0),
3906 0, high, 1);
3907 n_high = range_binop (MINUS_EXPR, exp_type,
3908 build_int_cst (exp_type, 0),
3909 0, low, 0);
3910 if (n_high != 0 && TREE_OVERFLOW (n_high))
3911 return NULL_TREE;
3912 goto normalize;
3914 case BIT_NOT_EXPR:
3915 /* ~ X -> -X - 1 */
3916 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3917 build_int_cst (exp_type, 1));
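/* A quick sanity sketch (illustrative only) of the identity used in
   the BIT_NOT_EXPR case above, in well-defined unsigned arithmetic:  */
#if 0
static int
bitnot_identity_demo (unsigned int x)
{
  return ~x == -x - 1;                  /* 1 for every x */
}
#endif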
3919 case PLUS_EXPR:
3920 case MINUS_EXPR:
3921 if (TREE_CODE (arg1) != INTEGER_CST)
3922 return NULL_TREE;
3924 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3925 move a constant to the other side. */
3926 if (!TYPE_UNSIGNED (arg0_type)
3927 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3928 return NULL_TREE;
3930 /* If EXP is signed, any overflow in the computation is undefined,
3931 so we don't worry about it so long as our computations on
3932 the bounds don't overflow. For unsigned, overflow is defined
3933 and this is exactly the right thing. */
3934 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3935 arg0_type, low, 0, arg1, 0);
3936 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3937 arg0_type, high, 1, arg1, 0);
3938 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3939 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3940 return NULL_TREE;
3942 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 *strict_overflow_p = true;
3945 normalize:
3946 /* Check for an unsigned range which has wrapped around the maximum
3947 value thus making n_high < n_low, and normalize it. */
3948 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3950 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3951 integer_one_node, 0);
3952 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3953 integer_one_node, 0);
3955 /* If the range is of the form +/- [ x+1, x ], we won't
3956 be able to normalize it. But then, it represents the
3957 whole range or the empty set, so make it
3958 +/- [ -, - ]. */
3959 if (tree_int_cst_equal (n_low, low)
3960 && tree_int_cst_equal (n_high, high))
3961 low = high = 0;
3962 else
3963 in_p = ! in_p;
3965 else
3966 low = n_low, high = n_high;
3968 *p_low = low;
3969 *p_high = high;
3970 *p_in_p = in_p;
3971 return arg0;
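/* Sketch (not part of GCC) of the normalization above: the wrapped
   unsigned char range + [251, 5] becomes the complement of [6, 250].  */
#if 0
static int
wrapped_range_demo (unsigned char x)
{
  int wrapped    = (x >= 251 || x <= 5);    /* + [251, 5], n_high < n_low */
  int complement = !(x >= 6 && x <= 250);   /* - [6, 250] after normalizing */
  return wrapped == complement;             /* 1 for every x */
}
#endif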
3973 CASE_CONVERT:
3974 case NON_LVALUE_EXPR:
3975 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3976 return NULL_TREE;
3978 if (! INTEGRAL_TYPE_P (arg0_type)
3979 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3980 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3981 return NULL_TREE;
3983 n_low = low, n_high = high;
3985 if (n_low != 0)
3986 n_low = fold_convert_loc (loc, arg0_type, n_low);
3988 if (n_high != 0)
3989 n_high = fold_convert_loc (loc, arg0_type, n_high);
3991 /* If we're converting arg0 from an unsigned type to exp's
3992 signed type, we will be doing the comparison as unsigned.
3993 The tests above have already verified that LOW and HIGH
3994 are both positive.
3996 So we have to ensure that we will handle large unsigned
3997 values the same way that the current signed bounds treat
3998 negative values. */
4000 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4002 tree high_positive;
4003 tree equiv_type;
4004 /* For fixed-point modes, we need to pass the saturating flag
4005 as the 2nd parameter. */
4006 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4007 equiv_type
4008 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4009 TYPE_SATURATING (arg0_type));
4010 else
4011 equiv_type
4012 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4014 /* A range without an upper bound is, naturally, unbounded.
4015 Since convert would have cropped a very large value, use
4016 the max value for the destination type. */
4017 high_positive
4018 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4019 : TYPE_MAX_VALUE (arg0_type);
4021 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4022 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4023 fold_convert_loc (loc, arg0_type,
4024 high_positive),
4025 build_int_cst (arg0_type, 1));
4027 /* If the low bound is specified, "and" the range with the
4028 range for which the original unsigned value will be
4029 positive. */
4030 if (low != 0)
4032 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4033 1, fold_convert_loc (loc, arg0_type,
4034 integer_zero_node),
4035 high_positive))
4036 return NULL_TREE;
4038 in_p = (n_in_p == in_p);
4040 else
4042 /* Otherwise, "or" the range with the range of the input
4043 that will be interpreted as negative. */
4044 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4045 1, fold_convert_loc (loc, arg0_type,
4046 integer_zero_node),
4047 high_positive))
4048 return NULL_TREE;
4050 in_p = (in_p != n_in_p);
4054 *p_low = n_low;
4055 *p_high = n_high;
4056 *p_in_p = in_p;
4057 return arg0;
4059 default:
4060 return NULL_TREE;
4064 /* Given EXP, a logical expression, set the range it is testing into
4065 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4066 actually being tested. *PLOW and *PHIGH will be made of the same
4067 type as the returned expression. If EXP is not a comparison, we
4068 will most likely not be returning a useful value and range. Set
4069 *STRICT_OVERFLOW_P to true if the return value is only valid
4070 because signed overflow is undefined; otherwise, do not change
4071 *STRICT_OVERFLOW_P. */
4073 tree
4074 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4075 bool *strict_overflow_p)
4077 enum tree_code code;
4078 tree arg0, arg1 = NULL_TREE;
4079 tree exp_type, nexp;
4080 int in_p;
4081 tree low, high;
4082 location_t loc = EXPR_LOCATION (exp);
4084 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4085 and see if we can refine the range. Some of the cases below may not
4086 happen, but it doesn't seem worth worrying about this. We "continue"
4087 the outer loop when we've changed something; otherwise we "break"
4088 the switch, which will "break" the while. */
4090 in_p = 0;
4091 low = high = build_int_cst (TREE_TYPE (exp), 0);
4093 while (1)
4095 code = TREE_CODE (exp);
4096 exp_type = TREE_TYPE (exp);
4097 arg0 = NULL_TREE;
4099 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4101 if (TREE_OPERAND_LENGTH (exp) > 0)
4102 arg0 = TREE_OPERAND (exp, 0);
4103 if (TREE_CODE_CLASS (code) == tcc_binary
4104 || TREE_CODE_CLASS (code) == tcc_comparison
4105 || (TREE_CODE_CLASS (code) == tcc_expression
4106 && TREE_OPERAND_LENGTH (exp) > 1))
4107 arg1 = TREE_OPERAND (exp, 1);
4109 if (arg0 == NULL_TREE)
4110 break;
4112 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4113 &high, &in_p, strict_overflow_p);
4114 if (nexp == NULL_TREE)
4115 break;
4116 exp = nexp;
4119 /* If EXP is a constant, we can evaluate whether this is true or false. */
4120 if (TREE_CODE (exp) == INTEGER_CST)
4122 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4123 exp, 0, low, 0))
4124 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4125 exp, 1, high, 1)));
4126 low = high = 0;
4127 exp = 0;
4130 *pin_p = in_p, *plow = low, *phigh = high;
4131 return exp;
4134 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4135 type, TYPE, return an expression to test if EXP is in (or out of, depending
4136 on IN_P) the range. Return 0 if the test couldn't be created. */
4138 tree
4139 build_range_check (location_t loc, tree type, tree exp, int in_p,
4140 tree low, tree high)
4142 tree etype = TREE_TYPE (exp), value;
4144 #ifdef HAVE_canonicalize_funcptr_for_compare
4145 /* Disable this optimization for function pointer expressions
4146 on targets that require function pointer canonicalization. */
4147 if (HAVE_canonicalize_funcptr_for_compare
4148 && TREE_CODE (etype) == POINTER_TYPE
4149 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4150 return NULL_TREE;
4151 #endif
4153 if (! in_p)
4155 value = build_range_check (loc, type, exp, 1, low, high);
4156 if (value != 0)
4157 return invert_truthvalue_loc (loc, value);
4159 return 0;
4162 if (low == 0 && high == 0)
4163 return build_int_cst (type, 1);
4165 if (low == 0)
4166 return fold_build2_loc (loc, LE_EXPR, type, exp,
4167 fold_convert_loc (loc, etype, high));
4169 if (high == 0)
4170 return fold_build2_loc (loc, GE_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, low));
4173 if (operand_equal_p (low, high, 0))
4174 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4175 fold_convert_loc (loc, etype, low));
4177 if (integer_zerop (low))
4179 if (! TYPE_UNSIGNED (etype))
4181 etype = unsigned_type_for (etype);
4182 high = fold_convert_loc (loc, etype, high);
4183 exp = fold_convert_loc (loc, etype, exp);
4185 return build_range_check (loc, type, exp, 1, 0, high);
4188 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4189 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4191 unsigned HOST_WIDE_INT lo;
4192 HOST_WIDE_INT hi;
4193 int prec;
4195 prec = TYPE_PRECISION (etype);
4196 if (prec <= HOST_BITS_PER_WIDE_INT)
4198 hi = 0;
4199 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4201 else
4203 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4204 lo = (unsigned HOST_WIDE_INT) -1;
4207 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4209 if (TYPE_UNSIGNED (etype))
4211 tree signed_etype = signed_type_for (etype);
4212 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4213 etype
4214 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4215 else
4216 etype = signed_etype;
4217 exp = fold_convert_loc (loc, etype, exp);
4219 return fold_build2_loc (loc, GT_EXPR, type, exp,
4220 build_int_cst (etype, 0));
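/* Sketch (illustrative only) of the fold above, for an 8-bit
   two's-complement type:  */
#if 0
static int
signed_gt_zero_demo (unsigned char c)
{
  int pair   = (c >= 1 && c <= 127);
  int folded = ((signed char) c > 0);
  return pair == folded;                /* 1 for every c */
}
#endif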
4224 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4225 This requires wrap-around arithmetic for the type of the expression.
4226 First make sure that arithmetic in this type is valid, then make sure
4227 that it wraps around. */
4228 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4229 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4230 TYPE_UNSIGNED (etype));
4232 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4234 tree utype, minv, maxv;
4236 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4237 for the type in question, as we rely on this here. */
4238 utype = unsigned_type_for (etype);
4239 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4240 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4241 integer_one_node, 1);
4242 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4244 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4245 minv, 1, maxv, 1)))
4246 etype = utype;
4247 else
4248 return 0;
4251 high = fold_convert_loc (loc, etype, high);
4252 low = fold_convert_loc (loc, etype, low);
4253 exp = fold_convert_loc (loc, etype, exp);
4255 value = const_binop (MINUS_EXPR, high, low);
4258 if (POINTER_TYPE_P (etype))
4260 if (value != 0 && !TREE_OVERFLOW (value))
4262 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4263 return build_range_check (loc, type,
4264 fold_build_pointer_plus_loc (loc, exp, low),
4265 1, build_int_cst (etype, 0), value);
4267 return 0;
4270 if (value != 0 && !TREE_OVERFLOW (value))
4271 return build_range_check (loc, type,
4272 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4273 1, build_int_cst (etype, 0), value);
4275 return 0;
4278 /* Return the predecessor of VAL in its type, handling the infinite case. */
4280 static tree
4281 range_predecessor (tree val)
4283 tree type = TREE_TYPE (val);
4285 if (INTEGRAL_TYPE_P (type)
4286 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4287 return 0;
4288 else
4289 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4292 /* Return the successor of VAL in its type, handling the infinite case. */
4294 static tree
4295 range_successor (tree val)
4297 tree type = TREE_TYPE (val);
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4301 return 0;
4302 else
4303 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4306 /* Given two ranges, see if we can merge them into one. Return 1 if we
4307 can, 0 if we can't. Set the output range into the specified parameters. */
4309 bool
4310 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4311 tree high0, int in1_p, tree low1, tree high1)
4313 int no_overlap;
4314 int subset;
4315 int temp;
4316 tree tem;
4317 int in_p;
4318 tree low, high;
4319 int lowequal = ((low0 == 0 && low1 == 0)
4320 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 low0, 0, low1, 0)));
4322 int highequal = ((high0 == 0 && high1 == 0)
4323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4324 high0, 1, high1, 1)));
4326 /* Make range 0 be the range that starts first, or that ends last if
4327 they start at the same value. Swap them if that is not the case. */
4328 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4329 low0, 0, low1, 0))
4330 || (lowequal
4331 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4332 high1, 1, high0, 1))))
4334 temp = in0_p, in0_p = in1_p, in1_p = temp;
4335 tem = low0, low0 = low1, low1 = tem;
4336 tem = high0, high0 = high1, high1 = tem;
4339 /* Now flag two cases, whether the ranges are disjoint or whether the
4340 second range is totally subsumed in the first. Note that the tests
4341 below are simplified by the ones above. */
4342 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4343 high0, 1, low1, 0));
4344 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4345 high1, 1, high0, 1));
4347 /* We now have four cases, depending on whether we are including or
4348 excluding the two ranges. */
4349 if (in0_p && in1_p)
4351 /* If they don't overlap, the result is false. If the second range
4352 is a subset it is the result. Otherwise, the range is from the start
4353 of the second to the end of the first. */
4354 if (no_overlap)
4355 in_p = 0, low = high = 0;
4356 else if (subset)
4357 in_p = 1, low = low1, high = high1;
4358 else
4359 in_p = 1, low = low1, high = high0;
4362 else if (in0_p && ! in1_p)
4364 /* If they don't overlap, the result is the first range. If they are
4365 equal, the result is false. If the second range is a subset of the
4366 first, and the ranges begin at the same place, we go from just after
4367 the end of the second range to the end of the first. If the second
4368 range is not a subset of the first, or if it is a subset and both
4369 ranges end at the same place, the range starts at the start of the
4370 first range and ends just before the second range.
4371 Otherwise, we can't describe this as a single range. */
4372 if (no_overlap)
4373 in_p = 1, low = low0, high = high0;
4374 else if (lowequal && highequal)
4375 in_p = 0, low = high = 0;
4376 else if (subset && lowequal)
4378 low = range_successor (high1);
4379 high = high0;
4380 in_p = 1;
4381 if (low == 0)
4383 /* We are in the weird situation where high0 > high1 but
4384 high1 has no successor. Punt. */
4385 return 0;
4388 else if (! subset || highequal)
4390 low = low0;
4391 high = range_predecessor (low1);
4392 in_p = 1;
4393 if (high == 0)
4395 /* low0 < low1 but low1 has no predecessor. Punt. */
4396 return 0;
4399 else
4400 return 0;
4403 else if (! in0_p && in1_p)
4405 /* If they don't overlap, the result is the second range. If the second
4406 is a subset of the first, the result is false. Otherwise,
4407 the range starts just after the first range and ends at the
4408 end of the second. */
4409 if (no_overlap)
4410 in_p = 1, low = low1, high = high1;
4411 else if (subset || highequal)
4412 in_p = 0, low = high = 0;
4413 else
4415 low = range_successor (high0);
4416 high = high1;
4417 in_p = 1;
4418 if (low == 0)
4420 /* high1 > high0 but high0 has no successor. Punt. */
4421 return 0;
4426 else
4428 /* The case where we are excluding both ranges. Here the complex case
4429 is if they don't overlap. In that case, the only time we have a
4430 range is if they are adjacent. If the second is a subset of the
4431 first, the result is the first. Otherwise, the range to exclude
4432 starts at the beginning of the first range and ends at the end of the
4433 second. */
4434 if (no_overlap)
4436 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4437 range_successor (high0),
4438 1, low1, 0)))
4439 in_p = 0, low = low0, high = high1;
4440 else
4442 /* Canonicalize - [min, x] into - [-, x]. */
4443 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4444 switch (TREE_CODE (TREE_TYPE (low0)))
4446 case ENUMERAL_TYPE:
4447 if (TYPE_PRECISION (TREE_TYPE (low0))
4448 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4449 break;
4450 /* FALLTHROUGH */
4451 case INTEGER_TYPE:
4452 if (tree_int_cst_equal (low0,
4453 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4454 low0 = 0;
4455 break;
4456 case POINTER_TYPE:
4457 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4458 && integer_zerop (low0))
4459 low0 = 0;
4460 break;
4461 default:
4462 break;
4465 /* Canonicalize - [x, max] into - [x, -]. */
4466 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4467 switch (TREE_CODE (TREE_TYPE (high1)))
4469 case ENUMERAL_TYPE:
4470 if (TYPE_PRECISION (TREE_TYPE (high1))
4471 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4472 break;
4473 /* FALLTHROUGH */
4474 case INTEGER_TYPE:
4475 if (tree_int_cst_equal (high1,
4476 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4477 high1 = 0;
4478 break;
4479 case POINTER_TYPE:
4480 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4481 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4482 high1, 1,
4483 integer_one_node, 1)))
4484 high1 = 0;
4485 break;
4486 default:
4487 break;
4490 /* The ranges might also be adjacent between the maximum and
4491 minimum values of the given type. For
4492 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4493 return + [x + 1, y - 1]. */
4494 if (low0 == 0 && high1 == 0)
4496 low = range_successor (high0);
4497 high = range_predecessor (low1);
4498 if (low == 0 || high == 0)
4499 return 0;
4501 in_p = 1;
4503 else
4504 return 0;
4507 else if (subset)
4508 in_p = 0, low = low0, high = high0;
4509 else
4510 in_p = 0, low = low0, high = high1;
4513 *pin_p = in_p, *plow = low, *phigh = high;
4514 return 1;
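/* Source-level flavor of the merges performed above; a sketch, not
   part of this file.  Two adjacent included ranges become one range,
   which build_range_check turns into a single unsigned comparison:  */
#if 0
static int
merge_ranges_demo (unsigned char x)
{
  int two_tests = ((x >= 2 && x <= 5) || (x >= 6 && x <= 9));
  int one_range = ((unsigned char) (x - 2) <= 7);    /* + [2, 9] */
  return two_tests == one_range;        /* 1 for every x */
}
#endif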
4518 /* Subroutine of fold, looking inside expressions of the form
4519 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4520 of the COND_EXPR. This function is also used to optimize
4521 A op B ? C : A, by reversing the comparison first.
4523 Return a folded expression whose code is not a COND_EXPR
4524 anymore, or NULL_TREE if no folding opportunity is found. */
4526 static tree
4527 fold_cond_expr_with_comparison (location_t loc, tree type,
4528 tree arg0, tree arg1, tree arg2)
4530 enum tree_code comp_code = TREE_CODE (arg0);
4531 tree arg00 = TREE_OPERAND (arg0, 0);
4532 tree arg01 = TREE_OPERAND (arg0, 1);
4533 tree arg1_type = TREE_TYPE (arg1);
4534 tree tem;
4536 STRIP_NOPS (arg1);
4537 STRIP_NOPS (arg2);
4539 /* If we have A op 0 ? A : -A, consider applying the following
4540 transformations:
4542 A == 0? A : -A same as -A
4543 A != 0? A : -A same as A
4544 A >= 0? A : -A same as abs (A)
4545 A > 0? A : -A same as abs (A)
4546 A <= 0? A : -A same as -abs (A)
4547 A < 0? A : -A same as -abs (A)
4549 None of these transformations work for modes with signed
4550 zeros. If A is +/-0, the first two transformations will
4551 change the sign of the result (from +0 to -0, or vice
4552 versa). The last four will fix the sign of the result,
4553 even though the original expressions could be positive or
4554 negative, depending on the sign of A.
4556 Note that all these transformations are correct if A is
4557 NaN, since the two alternatives (A and -A) are also NaNs. */
4558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4559 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4560 ? real_zerop (arg01)
4561 : integer_zerop (arg01))
4562 && ((TREE_CODE (arg2) == NEGATE_EXPR
4563 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4564 /* In the case that A is of the form X-Y, '-A' (arg2) may
4565 have already been folded to Y-X, check for that. */
4566 || (TREE_CODE (arg1) == MINUS_EXPR
4567 && TREE_CODE (arg2) == MINUS_EXPR
4568 && operand_equal_p (TREE_OPERAND (arg1, 0),
4569 TREE_OPERAND (arg2, 1), 0)
4570 && operand_equal_p (TREE_OPERAND (arg1, 1),
4571 TREE_OPERAND (arg2, 0), 0))))
4572 switch (comp_code)
4574 case EQ_EXPR:
4575 case UNEQ_EXPR:
4576 tem = fold_convert_loc (loc, arg1_type, arg1);
4577 return pedantic_non_lvalue_loc (loc,
4578 fold_convert_loc (loc, type,
4579 negate_expr (tem)));
4580 case NE_EXPR:
4581 case LTGT_EXPR:
4582 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4583 case UNGE_EXPR:
4584 case UNGT_EXPR:
4585 if (flag_trapping_math)
4586 break;
4587 /* Fall through. */
4588 case GE_EXPR:
4589 case GT_EXPR:
4590 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4591 arg1 = fold_convert_loc (loc, signed_type_for
4592 (TREE_TYPE (arg1)), arg1);
4593 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4594 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4595 case UNLE_EXPR:
4596 case UNLT_EXPR:
4597 if (flag_trapping_math)
4598 break;
4599 case LE_EXPR:
4600 case LT_EXPR:
4601 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4602 arg1 = fold_convert_loc (loc, signed_type_for
4603 (TREE_TYPE (arg1)), arg1);
4604 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4605 return negate_expr (fold_convert_loc (loc, type, tem));
4606 default:
4607 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4608 break;
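/* Integer flavor of the A op 0 ? A : -A folds above (a sketch; the
   signed-zero caveats do not arise for integers):  */
#if 0
static int
abs_fold_demo (int a)
{
  return (a >= 0 ? a : -a);             /* folded to ABS_EXPR <a> */
}
#endif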
4611 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4612 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4613 both transformations are correct when A is NaN: A != 0
4614 is then true, and A == 0 is false. */
4616 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4617 && integer_zerop (arg01) && integer_zerop (arg2))
4619 if (comp_code == NE_EXPR)
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4621 else if (comp_code == EQ_EXPR)
4622 return build_int_cst (type, 0);
4625 /* Try some transformations of A op B ? A : B.
4627 A == B? A : B same as B
4628 A != B? A : B same as A
4629 A >= B? A : B same as max (A, B)
4630 A > B? A : B same as max (B, A)
4631 A <= B? A : B same as min (A, B)
4632 A < B? A : B same as min (B, A)
4634 As above, these transformations don't work in the presence
4635 of signed zeros. For example, if A and B are zeros of
4636 opposite sign, the first two transformations will change
4637 the sign of the result. In the last four, the original
4638 expressions give different results for (A=+0, B=-0) and
4639 (A=-0, B=+0), but the transformed expressions do not.
4641 The first two transformations are correct if either A or B
4642 is a NaN. In the first transformation, the condition will
4643 be false, and B will indeed be chosen. In the case of the
4644 second transformation, the condition A != B will be true,
4645 and A will be chosen.
4647 The conversions to max() and min() are not correct if B is
4648 a number and A is not. The conditions in the original
4649 expressions will be false, so all four give B. The min()
4650 and max() versions would give a NaN instead. */
4651 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4652 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4653 /* Avoid these transformations if the COND_EXPR may be used
4654 as an lvalue in the C++ front-end. PR c++/19199. */
4655 && (in_gimple_form
4656 || (strcmp (lang_hooks.name, "GNU C++") != 0
4657 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4658 || ! maybe_lvalue_p (arg1)
4659 || ! maybe_lvalue_p (arg2)))
4661 tree comp_op0 = arg00;
4662 tree comp_op1 = arg01;
4663 tree comp_type = TREE_TYPE (comp_op0);
4665 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4666 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4668 comp_type = type;
4669 comp_op0 = arg1;
4670 comp_op1 = arg2;
4673 switch (comp_code)
4675 case EQ_EXPR:
4676 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4677 case NE_EXPR:
4678 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 case UNLE_EXPR:
4682 case UNLT_EXPR:
4683 /* In C++ a ?: expression can be an lvalue, so put the
4684 operand which will be used if they are equal first
4685 so that we can convert this back to the
4686 corresponding COND_EXPR. */
4687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4689 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4690 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4691 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4692 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4693 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4694 comp_op1, comp_op0);
4695 return pedantic_non_lvalue_loc (loc,
4696 fold_convert_loc (loc, type, tem));
4698 break;
4699 case GE_EXPR:
4700 case GT_EXPR:
4701 case UNGE_EXPR:
4702 case UNGT_EXPR:
4703 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4705 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4706 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4707 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4708 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4709 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4710 comp_op1, comp_op0);
4711 return pedantic_non_lvalue_loc (loc,
4712 fold_convert_loc (loc, type, tem));
4714 break;
4715 case UNEQ_EXPR:
4716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type, arg2));
4719 break;
4720 case LTGT_EXPR:
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, arg1));
4724 break;
4725 default:
4726 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4727 break;
4731 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4732 we might still be able to simplify this. For example,
4733 if C1 is one less or one more than C2, this might have started
4734 out as a MIN or MAX and been transformed by this function.
4735 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4737 if (INTEGRAL_TYPE_P (type)
4738 && TREE_CODE (arg01) == INTEGER_CST
4739 && TREE_CODE (arg2) == INTEGER_CST)
4740 switch (comp_code)
4742 case EQ_EXPR:
4743 if (TREE_CODE (arg1) == INTEGER_CST)
4744 break;
4745 /* We can replace A with C1 in this case. */
4746 arg1 = fold_convert_loc (loc, type, arg01);
4747 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4749 case LT_EXPR:
4750 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4751 MIN_EXPR, to preserve the signedness of the comparison. */
4752 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4753 OEP_ONLY_CONST)
4754 && operand_equal_p (arg01,
4755 const_binop (PLUS_EXPR, arg2,
4756 build_int_cst (type, 1)),
4757 OEP_ONLY_CONST))
4759 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4760 fold_convert_loc (loc, TREE_TYPE (arg00),
4761 arg2));
4762 return pedantic_non_lvalue_loc (loc,
4763 fold_convert_loc (loc, type, tem));
4765 break;
4767 case LE_EXPR:
4768 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4769 as above. */
4770 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4771 OEP_ONLY_CONST)
4772 && operand_equal_p (arg01,
4773 const_binop (MINUS_EXPR, arg2,
4774 build_int_cst (type, 1)),
4775 OEP_ONLY_CONST))
4777 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4778 fold_convert_loc (loc, TREE_TYPE (arg00),
4779 arg2));
4780 return pedantic_non_lvalue_loc (loc,
4781 fold_convert_loc (loc, type, tem));
4783 break;
4785 case GT_EXPR:
4786 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4787 MAX_EXPR, to preserve the signedness of the comparison. */
4788 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4789 OEP_ONLY_CONST)
4790 && operand_equal_p (arg01,
4791 const_binop (MINUS_EXPR, arg2,
4792 build_int_cst (type, 1)),
4793 OEP_ONLY_CONST))
4795 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4796 fold_convert_loc (loc, TREE_TYPE (arg00),
4797 arg2));
4798 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4800 break;
4802 case GE_EXPR:
4803 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (PLUS_EXPR, arg2,
4808 build_int_cst (type, 1)),
4809 OEP_ONLY_CONST))
4811 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4816 break;
4817 case NE_EXPR:
4818 break;
4819 default:
4820 gcc_unreachable ();
4823 return NULL_TREE;
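/* Sketch of the A op B ? A : B folds above, on integers, where the
   NaN and signed-zero caveats do not apply; illustrative only:  */
#if 0
static int
min_fold_demo (int a, int b)
{
  int lt = (a <  b ? a : b);            /* folded to MIN_EXPR <b, a> */
  int le = (a <= b ? a : b);            /* folded to MIN_EXPR <a, b> */
  return lt == le;                      /* both compute the minimum */
}
#endif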
4828 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4829 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4830 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4831 false) >= 2)
4832 #endif
4834 /* EXP is some logical combination of boolean tests. See if we can
4835 merge it into some range test. Return the new tree if so. */
4837 static tree
4838 fold_range_test (location_t loc, enum tree_code code, tree type,
4839 tree op0, tree op1)
4841 int or_op = (code == TRUTH_ORIF_EXPR
4842 || code == TRUTH_OR_EXPR);
4843 int in0_p, in1_p, in_p;
4844 tree low0, low1, low, high0, high1, high;
4845 bool strict_overflow_p = false;
4846 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4847 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4848 tree tem;
4849 const char * const warnmsg = G_("assuming signed overflow does not occur "
4850 "when simplifying range test");
4852 /* If this is an OR operation, invert both sides; we will invert
4853 again at the end. */
4854 if (or_op)
4855 in0_p = ! in0_p, in1_p = ! in1_p;
4857 /* If both expressions are the same, if we can merge the ranges, and we
4858 can build the range test, return it or its inversion. If one of the
4859 ranges is always true or always false, consider it to be the same
4860 expression as the other. */
4861 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4862 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4863 in1_p, low1, high1)
4864 && 0 != (tem = (build_range_check (loc, type,
4865 lhs != 0 ? lhs
4866 : rhs != 0 ? rhs : integer_zero_node,
4867 in_p, low, high))))
4869 if (strict_overflow_p)
4870 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4871 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4874 /* On machines where branches are expensive, if this is a
4875 short-circuited branch and the underlying object on both sides
4876 is the same, make a non-short-circuit operation. */
4877 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4878 && lhs != 0 && rhs != 0
4879 && (code == TRUTH_ANDIF_EXPR
4880 || code == TRUTH_ORIF_EXPR)
4881 && operand_equal_p (lhs, rhs, 0))
4883 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4884 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4885 which case we can't do this. */
4886 if (simple_operand_p (lhs))
4887 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4888 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4889 type, op0, op1);
4891 else if (!lang_hooks.decls.global_bindings_p ()
4892 && !CONTAINS_PLACEHOLDER_P (lhs))
4894 tree common = save_expr (lhs);
4896 if (0 != (lhs = build_range_check (loc, type, common,
4897 or_op ? ! in0_p : in0_p,
4898 low0, high0))
4899 && (0 != (rhs = build_range_check (loc, type, common,
4900 or_op ? ! in1_p : in1_p,
4901 low1, high1))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg,
4905 WARN_STRICT_OVERFLOW_COMPARISON);
4906 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4907 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4908 type, lhs, rhs);
4913 return 0;
4916 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4917 bit value. Arrange things so the extra bits will be set to zero if and
4918 only if C is sign-extended to its full width. If MASK is nonzero,
4919 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4921 static tree
4922 unextend (tree c, int p, int unsignedp, tree mask)
4924 tree type = TREE_TYPE (c);
4925 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4926 tree temp;
4928 if (p == modesize || unsignedp)
4929 return c;
4931 /* We work by getting just the sign bit into the low-order bit, then
4932 into the high-order bit, then sign-extend. We then XOR that value
4933 with C. */
4934 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4935 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4937 /* We must use a signed type in order to get an arithmetic right shift.
4938 However, we must also avoid introducing accidental overflows, so that
4939 a subsequent call to integer_zerop will work. Hence we must
4940 do the type conversion here. At this point, the constant is either
4941 zero or one, and the conversion to a signed type can never overflow.
4942 We could get an overflow if this conversion is done anywhere else. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (signed_type_for (type), temp);
4946 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4947 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4948 if (mask != 0)
4949 temp = const_binop (BIT_AND_EXPR, temp,
4950 fold_convert (TREE_TYPE (c), mask));
4951 /* If necessary, convert the type back to match the type of C. */
4952 if (TYPE_UNSIGNED (type))
4953 temp = fold_convert (type, temp);
4955 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4958 /* For an expression that has the form
4959 (A && B) || ~B
4960 or
4961 (A || B) && ~B,
4962 we can drop one of the inner expressions and simplify to
4963 A || ~B
4964 or
4965 A && ~B
4966 LOC is the location of the resulting expression. OP is the inner
4967 logical operation, the left-hand side in the examples above, while CMPOP
4968 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4969 removing a condition that guards another, as in
4970 (A != NULL && A->...) || A == NULL
4971 which we must not transform. If RHS_ONLY is true, only eliminate the
4972 right-most operand of the inner logical operation. */
4974 static tree
4975 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4976 bool rhs_only)
4978 tree type = TREE_TYPE (cmpop);
4979 enum tree_code code = TREE_CODE (cmpop);
4980 enum tree_code truthop_code = TREE_CODE (op);
4981 tree lhs = TREE_OPERAND (op, 0);
4982 tree rhs = TREE_OPERAND (op, 1);
4983 tree orig_lhs = lhs, orig_rhs = rhs;
4984 enum tree_code rhs_code = TREE_CODE (rhs);
4985 enum tree_code lhs_code = TREE_CODE (lhs);
4986 enum tree_code inv_code;
4988 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4989 return NULL_TREE;
4991 if (TREE_CODE_CLASS (code) != tcc_comparison)
4992 return NULL_TREE;
4994 if (rhs_code == truthop_code)
4996 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4997 if (newrhs != NULL_TREE)
4999 rhs = newrhs;
5000 rhs_code = TREE_CODE (rhs);
5003 if (lhs_code == truthop_code && !rhs_only)
5005 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5006 if (newlhs != NULL_TREE)
5008 lhs = newlhs;
5009 lhs_code = TREE_CODE (lhs);
5013 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5014 if (inv_code == rhs_code
5015 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5016 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 return lhs;
5018 if (!rhs_only && inv_code == lhs_code
5019 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5020 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5021 return rhs;
5022 if (rhs != orig_rhs || lhs != orig_lhs)
5023 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5024 lhs, rhs);
5025 return NULL_TREE;
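/* Source-level flavor of the simplification above (a sketch): b <= 2
   is exactly the inverse of b > 2, so the inner copy can be dropped:
   (a != 0 && b > 2) || b <= 2   ->   a != 0 || b <= 2.  */
#if 0
static int
opposite_arm_demo (int a, int b)
{
  int before = ((a != 0 && b > 2) || b <= 2);
  int after  = (a != 0 || b <= 2);
  return before == after;               /* 1 for every a, b */
}
#endif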
5028 /* Find ways of folding logical expressions of LHS and RHS:
5029 Try to merge two comparisons to the same innermost item.
5030 Look for range tests like "ch >= '0' && ch <= '9'".
5031 Look for combinations of simple terms on machines with expensive branches
5032 and evaluate the RHS unconditionally.
5034 For example, if we have p->a == 2 && p->b == 4 and we can make an
5035 object large enough to span both A and B, we can do this with a comparison
5036 against the object ANDed with a mask.
5038 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5039 operations to do this with one comparison.
5041 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5042 function and the one above.
5044 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5045 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5047 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5048 two operands.
5050 We return the simplified tree or 0 if no optimization is possible. */
5052 static tree
5053 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5054 tree lhs, tree rhs)
5056 /* If this is the "or" of two comparisons, we can do something if
5057 the comparisons are NE_EXPR. If this is the "and", we can do something
5058 if the comparisons are EQ_EXPR. I.e.,
5059 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5061 WANTED_CODE is this operation code. For single bit fields, we can
5062 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5063 comparison for one-bit fields. */
5065 enum tree_code wanted_code;
5066 enum tree_code lcode, rcode;
5067 tree ll_arg, lr_arg, rl_arg, rr_arg;
5068 tree ll_inner, lr_inner, rl_inner, rr_inner;
5069 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5070 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5071 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5072 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5073 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5074 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5075 enum machine_mode lnmode, rnmode;
5076 tree ll_mask, lr_mask, rl_mask, rr_mask;
5077 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5078 tree l_const, r_const;
5079 tree lntype, rntype, result;
5080 HOST_WIDE_INT first_bit, end_bit;
5081 int volatilep;
5083 /* Start by getting the comparison codes. Fail if anything is volatile.
5084 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5085 it were surrounded with a NE_EXPR. */
5087 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5088 return 0;
5090 lcode = TREE_CODE (lhs);
5091 rcode = TREE_CODE (rhs);
5093 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5095 lhs = build2 (NE_EXPR, truth_type, lhs,
5096 build_int_cst (TREE_TYPE (lhs), 0));
5097 lcode = NE_EXPR;
5100 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5102 rhs = build2 (NE_EXPR, truth_type, rhs,
5103 build_int_cst (TREE_TYPE (rhs), 0));
5104 rcode = NE_EXPR;
5107 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5108 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5109 return 0;
5111 ll_arg = TREE_OPERAND (lhs, 0);
5112 lr_arg = TREE_OPERAND (lhs, 1);
5113 rl_arg = TREE_OPERAND (rhs, 0);
5114 rr_arg = TREE_OPERAND (rhs, 1);
5116 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5117 if (simple_operand_p (ll_arg)
5118 && simple_operand_p (lr_arg))
5120 if (operand_equal_p (ll_arg, rl_arg, 0)
5121 && operand_equal_p (lr_arg, rr_arg, 0))
5123 result = combine_comparisons (loc, code, lcode, rcode,
5124 truth_type, ll_arg, lr_arg);
5125 if (result)
5126 return result;
5128 else if (operand_equal_p (ll_arg, rr_arg, 0)
5129 && operand_equal_p (lr_arg, rl_arg, 0))
5131 result = combine_comparisons (loc, code, lcode,
5132 swap_tree_comparison (rcode),
5133 truth_type, ll_arg, lr_arg);
5134 if (result)
5135 return result;
5139 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5140 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5142 /* If the RHS can be evaluated unconditionally and its operands are
5143 simple, it wins to evaluate the RHS unconditionally on machines
5144 with expensive branches. In this case, this isn't a comparison
5145 that can be merged. */
5147 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5148 false) >= 2
5149 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5150 && simple_operand_p (rl_arg)
5151 && simple_operand_p (rr_arg))
5153 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5154 if (code == TRUTH_OR_EXPR
5155 && lcode == NE_EXPR && integer_zerop (lr_arg)
5156 && rcode == NE_EXPR && integer_zerop (rr_arg)
5157 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5158 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5159 return build2_loc (loc, NE_EXPR, truth_type,
5160 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5161 ll_arg, rl_arg),
5162 build_int_cst (TREE_TYPE (ll_arg), 0));
5164 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5165 if (code == TRUTH_AND_EXPR
5166 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5167 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5168 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5169 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5170 return build2_loc (loc, EQ_EXPR, truth_type,
5171 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5172 ll_arg, rl_arg),
5173 build_int_cst (TREE_TYPE (ll_arg), 0));
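/* Sketch of the two conversions above, valid for any integral a, b:  */
#if 0
static int
bit_ior_demo (int a, int b)
{
  int or_ne  = ((a != 0 || b != 0) == ((a | b) != 0));
  int and_eq = ((a == 0 && b == 0) == ((a | b) == 0));
  return or_ne && and_eq;               /* 1 for every a, b */
}
#endif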
5176 /* See if the comparisons can be merged. Then get all the parameters for
5177 each side. */
5179 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5180 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5181 return 0;
5183 volatilep = 0;
5184 ll_inner = decode_field_reference (loc, ll_arg,
5185 &ll_bitsize, &ll_bitpos, &ll_mode,
5186 &ll_unsignedp, &volatilep, &ll_mask,
5187 &ll_and_mask);
5188 lr_inner = decode_field_reference (loc, lr_arg,
5189 &lr_bitsize, &lr_bitpos, &lr_mode,
5190 &lr_unsignedp, &volatilep, &lr_mask,
5191 &lr_and_mask);
5192 rl_inner = decode_field_reference (loc, rl_arg,
5193 &rl_bitsize, &rl_bitpos, &rl_mode,
5194 &rl_unsignedp, &volatilep, &rl_mask,
5195 &rl_and_mask);
5196 rr_inner = decode_field_reference (loc, rr_arg,
5197 &rr_bitsize, &rr_bitpos, &rr_mode,
5198 &rr_unsignedp, &volatilep, &rr_mask,
5199 &rr_and_mask);
5201 /* The inner operation on the lhs of each comparison must be the
5202 same if we are to be able to do anything.
5203 Then see if we have constants. If not, the same must be true for
5204 the rhs's. */
5205 if (volatilep || ll_inner == 0 || rl_inner == 0
5206 || ! operand_equal_p (ll_inner, rl_inner, 0))
5207 return 0;
5209 if (TREE_CODE (lr_arg) == INTEGER_CST
5210 && TREE_CODE (rr_arg) == INTEGER_CST)
5211 l_const = lr_arg, r_const = rr_arg;
5212 else if (lr_inner == 0 || rr_inner == 0
5213 || ! operand_equal_p (lr_inner, rr_inner, 0))
5214 return 0;
5215 else
5216 l_const = r_const = 0;
5218 /* If either comparison code is not correct for our logical operation,
5219 fail. However, we can convert a one-bit comparison against zero into
5220 the opposite comparison against that bit being set in the field. */
5222 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5223 if (lcode != wanted_code)
5225 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5227 /* Make the left operand unsigned, since we are only interested
5228 in the value of one bit. Otherwise we are doing the wrong
5229 thing below. */
5230 ll_unsignedp = 1;
5231 l_const = ll_mask;
5233 else
5234 return 0;
5237 /* This is analogous to the code for l_const above. */
5238 if (rcode != wanted_code)
5240 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5242 rl_unsignedp = 1;
5243 r_const = rl_mask;
5245 else
5246 return 0;
5249 /* See if we can find a mode that contains both fields being compared on
5250 the left. If we can't, fail. Otherwise, update all constants and masks
5251 to be relative to a field of that size. */
5252 first_bit = MIN (ll_bitpos, rl_bitpos);
5253 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5254 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5255 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5256 volatilep);
5257 if (lnmode == VOIDmode)
5258 return 0;
5260 lnbitsize = GET_MODE_BITSIZE (lnmode);
5261 lnbitpos = first_bit & ~ (lnbitsize - 1);
5262 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5263 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5265 if (BYTES_BIG_ENDIAN)
5267 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5268 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5271 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5272 size_int (xll_bitpos));
5273 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5274 size_int (xrl_bitpos));
5276 if (l_const)
5278 l_const = fold_convert_loc (loc, lntype, l_const);
5279 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5280 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5281 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5282 fold_build1_loc (loc, BIT_NOT_EXPR,
5283 lntype, ll_mask))))
5285 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5287 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5290 if (r_const)
5292 r_const = fold_convert_loc (loc, lntype, r_const);
5293 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5294 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5295 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5296 fold_build1_loc (loc, BIT_NOT_EXPR,
5297 lntype, rl_mask))))
5299 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5301 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5305 /* If the right sides are not constant, do the same for them. Also,
5306 disallow this optimization if a size or signedness mismatch occurs
5307 between the left and right sides. */
5308 if (l_const == 0)
5310 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5311 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5312 /* Make sure the two fields on the right
5313 correspond to the left without being swapped. */
5314 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5315 return 0;
5317 first_bit = MIN (lr_bitpos, rr_bitpos);
5318 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5319 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5320 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5321 volatilep);
5322 if (rnmode == VOIDmode)
5323 return 0;
5325 rnbitsize = GET_MODE_BITSIZE (rnmode);
5326 rnbitpos = first_bit & ~ (rnbitsize - 1);
5327 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5328 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5330 if (BYTES_BIG_ENDIAN)
5332 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5333 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5336 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5337 rntype, lr_mask),
5338 size_int (xlr_bitpos));
5339 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, rr_mask),
5341 size_int (xrr_bitpos));
5343 /* Make a mask that corresponds to both fields being compared.
5344 Do this for both items being compared. If the operands are the
5345 same size and the bits being compared are in the same position
5346 then we can do this by masking both and comparing the masked
5347 results. */
5348 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5349 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5350 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5352 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5353 ll_unsignedp || rl_unsignedp);
5354 if (! all_ones_mask_p (ll_mask, lnbitsize))
5355 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5357 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5358 lr_unsignedp || rr_unsignedp);
5359 if (! all_ones_mask_p (lr_mask, rnbitsize))
5360 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5362 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5365 /* There is still another way we can do something: If both pairs of
5366 fields being compared are adjacent, we may be able to make a wider
5367 field containing them both.
5369 Note that we still must mask the lhs/rhs expressions. Furthermore,
5370 the mask must be shifted to account for the shift done by
5371 make_bit_field_ref. */
5372 if ((ll_bitsize + ll_bitpos == rl_bitpos
5373 && lr_bitsize + lr_bitpos == rr_bitpos)
5374 || (ll_bitpos == rl_bitpos + rl_bitsize
5375 && lr_bitpos == rr_bitpos + rr_bitsize))
5377 tree type;
5379 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5380 ll_bitsize + rl_bitsize,
5381 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5382 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5383 lr_bitsize + rr_bitsize,
5384 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5386 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5387 size_int (MIN (xll_bitpos, xrl_bitpos)));
5388 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5389 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5391 /* Convert to the smaller type before masking out unwanted bits. */
5392 type = lntype;
5393 if (lntype != rntype)
5395 if (lnbitsize > rnbitsize)
5397 lhs = fold_convert_loc (loc, rntype, lhs);
5398 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5399 type = rntype;
5401 else if (lnbitsize < rnbitsize)
5403 rhs = fold_convert_loc (loc, lntype, rhs);
5404 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5405 type = lntype;
5409 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5410 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5412 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5413 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5415 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5418 return 0;
5421 /* Handle the case of comparisons with constants. If there is something in
5422 common between the masks, those bits of the constants must be the same.
5423 If not, the condition is always false. Test for this to avoid generating
5424 incorrect code below. */
5425 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5426 if (! integer_zerop (result)
5427 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5428 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5430 if (wanted_code == NE_EXPR)
5432 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5433 return constant_boolean_node (true, truth_type);
5435 else
5437 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5438 return constant_boolean_node (false, truth_type);
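/* Source-level flavor of the shared-mask check above (a sketch):
   bit 0 lies in both masks but the constants disagree on it, so the
   conjunction can never hold:  */
#if 0
static int
mask_conflict_demo (int x)
{
  return (x & 3) == 1 && (x & 5) == 4;  /* folded to constant 0 */
}
#endif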
5442 /* Construct the expression we will return. First get the component
5443 reference we will make. Unless the mask is all ones for the width of
5444 that field, perform the mask operation. Then compare with the
5445 merged constant. */
5446 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5447 ll_unsignedp || rl_unsignedp);
5449 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5450 if (! all_ones_mask_p (ll_mask, lnbitsize))
5451 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5453 return build2_loc (loc, wanted_code, truth_type, result,
5454 const_binop (BIT_IOR_EXPR, l_const, r_const));
5457 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5458 constant. */
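/* For instance, MIN (X, 4) < 4 is handled by the recursion below: it
is first rewritten as !(MIN (X, 4) >= 4), the GE_EXPR case expands
that to !(MIN (X, 4) == 4 || MIN (X, 4) > 4), and the EQ_EXPR and
GT_EXPR cases reduce this to !(X >= 4 || 0), i.e. X < 4.  */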
5460 static tree
5461 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5462 tree op0, tree op1)
5464 tree arg0 = op0;
5465 enum tree_code op_code;
5466 tree comp_const;
5467 tree minmax_const;
5468 int consts_equal, consts_lt;
5469 tree inner;
5471 STRIP_SIGN_NOPS (arg0);
5473 op_code = TREE_CODE (arg0);
5474 minmax_const = TREE_OPERAND (arg0, 1);
5475 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5476 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5477 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5478 inner = TREE_OPERAND (arg0, 0);
5480 /* If something does not permit us to optimize, return NULL_TREE. */
5481 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5482 || TREE_CODE (comp_const) != INTEGER_CST
5483 || TREE_OVERFLOW (comp_const)
5484 || TREE_CODE (minmax_const) != INTEGER_CST
5485 || TREE_OVERFLOW (minmax_const))
5486 return NULL_TREE;
5488 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5489 and GT_EXPR, doing the rest with recursive calls using logical
5490 simplifications. */
5491 switch (code)
5493 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5495 tree tem
5496 = optimize_minmax_comparison (loc,
5497 invert_tree_comparison (code, false),
5498 type, op0, op1);
5499 if (tem)
5500 return invert_truthvalue_loc (loc, tem);
5501 return NULL_TREE;
5504 case GE_EXPR:
5505 return
5506 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5507 optimize_minmax_comparison
5508 (loc, EQ_EXPR, type, arg0, comp_const),
5509 optimize_minmax_comparison
5510 (loc, GT_EXPR, type, arg0, comp_const));
5512 case EQ_EXPR:
5513 if (op_code == MAX_EXPR && consts_equal)
5514 /* MAX (X, 0) == 0 -> X <= 0 */
5515 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5517 else if (op_code == MAX_EXPR && consts_lt)
5518 /* MAX (X, 0) == 5 -> X == 5 */
5519 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521 else if (op_code == MAX_EXPR)
5522 /* MAX (X, 0) == -1 -> false */
5523 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5525 else if (consts_equal)
5526 /* MIN (X, 0) == 0 -> X >= 0 */
5527 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5529 else if (consts_lt)
5530 /* MIN (X, 0) == 5 -> false */
5531 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5533 else
5534 /* MIN (X, 0) == -1 -> X == -1 */
5535 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5537 case GT_EXPR:
5538 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5539 /* MAX (X, 0) > 0 -> X > 0
5540 MAX (X, 0) > 5 -> X > 5 */
5541 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5543 else if (op_code == MAX_EXPR)
5544 /* MAX (X, 0) > -1 -> true */
5545 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5547 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5548 /* MIN (X, 0) > 0 -> false
5549 MIN (X, 0) > 5 -> false */
5550 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5552 else
5553 /* MIN (X, 0) > -1 -> X > -1 */
5554 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5556 default:
5557 return NULL_TREE;
5561 /* T is an integer expression that is being multiplied, divided, or taken a
5562 modulus (CODE says which and what kind of divide or modulus) by a
5563 constant C. See if we can eliminate that operation by folding it with
5564 other operations already in T. WIDE_TYPE, if non-null, is a type that
5565 should be used for the computation if wider than our type.
5567 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5568 (X * 2) + (Y * 4). We must, however, be assured that either the original
5569 expression would not overflow or that overflow is undefined for the type
5570 in the language in question.
5572 If we return a non-null expression, it is an equivalent form of the
5573 original computation, but need not be in the original type.
5575 We set *STRICT_OVERFLOW_P to true if the return value depends on
5576 signed overflow being undefined. Otherwise we do not change
5577 *STRICT_OVERFLOW_P. */
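/* For example, for signed X (where overflow is undefined), dividing
X * 4 by 2 yields X * 2: the division cancels against the inner
multiplication, and *STRICT_OVERFLOW_P is set because the result
would differ from the original if X * 4 wrapped.  */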
5579 static tree
5580 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5581 bool *strict_overflow_p)
5583 /* To avoid exponential search depth, refuse to allow recursion past
5584 three levels. Beyond that (1) it's highly unlikely that we'll find
5585 something interesting and (2) we've probably processed it before
5586 when we built the inner expression. */
5588 static int depth;
5589 tree ret;
5591 if (depth > 3)
5592 return NULL;
5594 depth++;
5595 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5596 depth--;
5598 return ret;
5601 static tree
5602 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5603 bool *strict_overflow_p)
5605 tree type = TREE_TYPE (t);
5606 enum tree_code tcode = TREE_CODE (t);
5607 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5608 > GET_MODE_SIZE (TYPE_MODE (type)))
5609 ? wide_type : type);
5610 tree t1, t2;
5611 int same_p = tcode == code;
5612 tree op0 = NULL_TREE, op1 = NULL_TREE;
5613 bool sub_strict_overflow_p;
5615 /* Don't deal with constants of zero here; they confuse the code below. */
5616 if (integer_zerop (c))
5617 return NULL_TREE;
5619 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5620 op0 = TREE_OPERAND (t, 0);
5622 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5623 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5625 /* Note that we need not handle conditional operations here since fold
5626 already handles those cases. So just do arithmetic here. */
5627 switch (tcode)
5629 case INTEGER_CST:
5630 /* For a constant, we can always simplify if we are a multiply
5631 or (for divide and modulus) if it is a multiple of our constant. */
5632 if (code == MULT_EXPR
5633 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5634 return const_binop (code, fold_convert (ctype, t),
5635 fold_convert (ctype, c));
5636 break;
5638 CASE_CONVERT: case NON_LVALUE_EXPR:
5639 /* If op0 is an expression ... */
5640 if ((COMPARISON_CLASS_P (op0)
5641 || UNARY_CLASS_P (op0)
5642 || BINARY_CLASS_P (op0)
5643 || VL_EXP_CLASS_P (op0)
5644 || EXPRESSION_CLASS_P (op0))
5645 /* ... and has wrapping overflow, and its type is smaller
5646 than ctype, then we cannot pass through as widening. */
5647 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5648 && (TYPE_PRECISION (ctype)
5649 > TYPE_PRECISION (TREE_TYPE (op0))))
5650 /* ... or this is a truncation (t is narrower than op0),
5651 then we cannot pass through this narrowing. */
5652 || (TYPE_PRECISION (type)
5653 < TYPE_PRECISION (TREE_TYPE (op0)))
5654 /* ... or signedness changes for division or modulus,
5655 then we cannot pass through this conversion. */
5656 || (code != MULT_EXPR
5657 && (TYPE_UNSIGNED (ctype)
5658 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5659 /* ... or has undefined overflow while the converted to
5660 type has not, we cannot do the operation in the inner type
5661 as that would introduce undefined overflow. */
5662 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5663 && !TYPE_OVERFLOW_UNDEFINED (type))))
5664 break;
5666 /* Pass the constant down and see if we can make a simplification. If
5667 we can, replace this expression with the inner simplification for
5668 possible later conversion to our or some other type. */
5669 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5670 && TREE_CODE (t2) == INTEGER_CST
5671 && !TREE_OVERFLOW (t2)
5672 && (0 != (t1 = extract_muldiv (op0, t2, code,
5673 code == MULT_EXPR
5674 ? ctype : NULL_TREE,
5675 strict_overflow_p))))
5676 return t1;
5677 break;
5679 case ABS_EXPR:
5680 /* If widening the type changes it from signed to unsigned, then we
5681 must avoid building ABS_EXPR itself as unsigned. */
5682 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5684 tree cstype = (*signed_type_for) (ctype);
5685 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5686 != 0)
5688 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5689 return fold_convert (ctype, t1);
5691 break;
5693 /* If the constant is negative, we cannot simplify this. */
5694 if (tree_int_cst_sgn (c) == -1)
5695 break;
5696 /* FALLTHROUGH */
5697 case NEGATE_EXPR:
5698 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5699 != 0)
5700 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5701 break;
5703 case MIN_EXPR: case MAX_EXPR:
5704 /* If widening the type changes the signedness, then we can't perform
5705 this optimization as that changes the result. */
5706 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5707 break;
5709 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5710 sub_strict_overflow_p = false;
5711 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5712 &sub_strict_overflow_p)) != 0
5713 && (t2 = extract_muldiv (op1, c, code, wide_type,
5714 &sub_strict_overflow_p)) != 0)
5716 if (tree_int_cst_sgn (c) < 0)
5717 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5718 if (sub_strict_overflow_p)
5719 *strict_overflow_p = true;
5720 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5721 fold_convert (ctype, t2));
5723 break;
5725 case LSHIFT_EXPR: case RSHIFT_EXPR:
5726 /* If the second operand is constant, this is a multiplication
5727 or floor division by a power of two, so we can treat it that
5728 way unless the multiplier or divisor overflows. Signed
5729 left-shift overflow is implementation-defined rather than
5730 undefined in C90, so do not convert signed left shift into
5731 multiplication. */
5732 if (TREE_CODE (op1) == INTEGER_CST
5733 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5734 /* const_binop may not detect overflow correctly,
5735 so check for it explicitly here. */
5736 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5737 && TREE_INT_CST_HIGH (op1) == 0
5738 && 0 != (t1 = fold_convert (ctype,
5739 const_binop (LSHIFT_EXPR,
5740 size_one_node,
5741 op1)))
5742 && !TREE_OVERFLOW (t1))
5743 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5744 ? MULT_EXPR : FLOOR_DIV_EXPR,
5745 ctype,
5746 fold_convert (ctype, op0),
5747 t1),
5748 c, code, wide_type, strict_overflow_p);
5749 break;
5751 case PLUS_EXPR: case MINUS_EXPR:
5752 /* See if we can eliminate the operation on both sides. If we can, we
5753 can return a new PLUS or MINUS. If we can't, the only remaining
5754 cases where we can do anything are if the second operand is a
5755 constant. */
5756 sub_strict_overflow_p = false;
5757 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5758 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5759 if (t1 != 0 && t2 != 0
5760 && (code == MULT_EXPR
5761 /* If not multiplication, we can only do this if both operands
5762 are divisible by c. */
5763 || (multiple_of_p (ctype, op0, c)
5764 && multiple_of_p (ctype, op1, c))))
5766 if (sub_strict_overflow_p)
5767 *strict_overflow_p = true;
5768 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5769 fold_convert (ctype, t2));
5772 /* If this was a subtraction, negate OP1 and set it to be an addition.
5773 This simplifies the logic below. */
5774 if (tcode == MINUS_EXPR)
5776 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5777 /* If OP1 was not easily negatable, the constant may be OP0. */
5778 if (TREE_CODE (op0) == INTEGER_CST)
5780 tree tem = op0;
5781 op0 = op1;
5782 op1 = tem;
5783 tem = t1;
5784 t1 = t2;
5785 t2 = tem;
5789 if (TREE_CODE (op1) != INTEGER_CST)
5790 break;
5792 /* If either OP1 or C is negative, this optimization is not safe for
5793 some of the division and remainder types while for others we need
5794 to change the code. */
5795 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5797 if (code == CEIL_DIV_EXPR)
5798 code = FLOOR_DIV_EXPR;
5799 else if (code == FLOOR_DIV_EXPR)
5800 code = CEIL_DIV_EXPR;
5801 else if (code != MULT_EXPR
5802 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5803 break;
5806 /* If it's a multiply or a division/modulus operation of a multiple
5807 of our constant, do the operation and verify it doesn't overflow. */
5808 if (code == MULT_EXPR
5809 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5811 op1 = const_binop (code, fold_convert (ctype, op1),
5812 fold_convert (ctype, c));
5813 /* We allow the constant to overflow with wrapping semantics. */
5814 if (op1 == 0
5815 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5816 break;
5818 else
5819 break;
5821 /* If we have an unsigned type, we cannot widen the operation since it
5822 will change the result if the original computation overflowed. */
5823 if (TYPE_UNSIGNED (ctype) && ctype != type)
5824 break;
5826 /* If we were able to eliminate our operation from the first side,
5827 apply our operation to the second side and reform the PLUS. */
5828 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5829 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5831 /* The last case is if we are a multiply. In that case, we can
5832 apply the distributive law to commute the multiply and addition
5833 if the multiplication of the constants doesn't overflow. */
5834 if (code == MULT_EXPR)
5835 return fold_build2 (tcode, ctype,
5836 fold_build2 (code, ctype,
5837 fold_convert (ctype, op0),
5838 fold_convert (ctype, c)),
5839 op1);
5841 break;
5843 case MULT_EXPR:
5844 /* We have a special case here if we are doing something like
5845 (C * 8) % 4 since we know that's zero. */
5846 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5847 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5848 /* If the multiplication can overflow we cannot optimize this. */
5849 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5850 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5851 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5853 *strict_overflow_p = true;
5854 return omit_one_operand (type, integer_zero_node, op0);
5857 /* ... fall through ... */
5859 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5860 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5861 /* If we can extract our operation from the LHS, do so and return a
5862 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5863 do something only if the second operand is a constant. */
5864 if (same_p
5865 && (t1 = extract_muldiv (op0, c, code, wide_type,
5866 strict_overflow_p)) != 0)
5867 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5868 fold_convert (ctype, op1));
5869 else if (tcode == MULT_EXPR && code == MULT_EXPR
5870 && (t1 = extract_muldiv (op1, c, code, wide_type,
5871 strict_overflow_p)) != 0)
5872 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5873 fold_convert (ctype, t1));
5874 else if (TREE_CODE (op1) != INTEGER_CST)
5875 return 0;
5877 /* If these are the same operation types, we can associate them
5878 assuming no overflow. */
5879 if (tcode == code)
5881 double_int mul;
5882 bool overflow_p;
5883 unsigned prec = TYPE_PRECISION (ctype);
5884 bool uns = TYPE_UNSIGNED (ctype);
5885 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5886 double_int dic = tree_to_double_int (c).ext (prec, uns);
5887 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5888 overflow_p = ((!uns && overflow_p)
5889 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5890 if (!double_int_fits_to_tree_p (ctype, mul)
5891 && ((uns && tcode != MULT_EXPR) || !uns))
5892 overflow_p = 1;
5893 if (!overflow_p)
5894 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5895 double_int_to_tree (ctype, mul));
5898 /* If these operations "cancel" each other, we have the main
5899 optimizations of this pass, which occur when either constant is a
5900 multiple of the other, in which case we replace this with either an
5901 operation of CODE or TCODE.
5903 If we have an unsigned type, we cannot do this since it will change
5904 the result if the original computation overflowed. */
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5906 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5907 || (tcode == MULT_EXPR
5908 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5909 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5910 && code != MULT_EXPR)))
5912 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5914 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5915 *strict_overflow_p = true;
5916 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5917 fold_convert (ctype,
5918 const_binop (TRUNC_DIV_EXPR,
5919 op1, c)));
5921 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5923 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5924 *strict_overflow_p = true;
5925 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5926 fold_convert (ctype,
5927 const_binop (TRUNC_DIV_EXPR,
5928 c, op1)));
5931 break;
5933 default:
5934 break;
5937 return 0;
5940 /* Return a node which has the indicated constant VALUE (either 0 or
5941 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5942 and is of the indicated TYPE. */
5944 tree
5945 constant_boolean_node (bool value, tree type)
5947 if (type == integer_type_node)
5948 return value ? integer_one_node : integer_zero_node;
5949 else if (type == boolean_type_node)
5950 return value ? boolean_true_node : boolean_false_node;
5951 else if (TREE_CODE (type) == VECTOR_TYPE)
5952 return build_vector_from_val (type,
5953 build_int_cst (TREE_TYPE (type),
5954 value ? -1 : 0));
5955 else
5956 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5960 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5961 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5962 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5963 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5964 COND is the first argument to CODE; otherwise (as in the example
5965 given here), it is the second argument. TYPE is the type of the
5966 original expression. Return NULL_TREE if no simplification is
5967 possible. */
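/* For instance, 5 + (b ? x : y) becomes b ? 5 + x : 5 + y; because
ARG is constant here, no SAVE_EXPR is needed and the final check
that at least one branch simplified always succeeds.  */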
5969 static tree
5970 fold_binary_op_with_conditional_arg (location_t loc,
5971 enum tree_code code,
5972 tree type, tree op0, tree op1,
5973 tree cond, tree arg, int cond_first_p)
5975 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5976 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5977 tree test, true_value, false_value;
5978 tree lhs = NULL_TREE;
5979 tree rhs = NULL_TREE;
5980 enum tree_code cond_code = COND_EXPR;
5982 if (TREE_CODE (cond) == COND_EXPR
5983 || TREE_CODE (cond) == VEC_COND_EXPR)
5985 test = TREE_OPERAND (cond, 0);
5986 true_value = TREE_OPERAND (cond, 1);
5987 false_value = TREE_OPERAND (cond, 2);
5988 /* If this operand throws an exception, then it does not make
5989 sense to try to perform a logical or arithmetic operation
5990 involving it. */
5991 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5992 lhs = true_value;
5993 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5994 rhs = false_value;
5996 else
5998 tree testtype = TREE_TYPE (cond);
5999 test = cond;
6000 true_value = constant_boolean_node (true, testtype);
6001 false_value = constant_boolean_node (false, testtype);
6004 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6005 cond_code = VEC_COND_EXPR;
6007 /* This transformation is only worthwhile if we don't have to wrap ARG
6008 in a SAVE_EXPR and the operation can be simplified without recursing
6009 on at least one of the branches once it's pushed inside the COND_EXPR. */
6010 if (!TREE_CONSTANT (arg)
6011 && (TREE_SIDE_EFFECTS (arg)
6012 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6013 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6014 return NULL_TREE;
6016 arg = fold_convert_loc (loc, arg_type, arg);
6017 if (lhs == 0)
6019 true_value = fold_convert_loc (loc, cond_type, true_value);
6020 if (cond_first_p)
6021 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6022 else
6023 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6025 if (rhs == 0)
6027 false_value = fold_convert_loc (loc, cond_type, false_value);
6028 if (cond_first_p)
6029 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6030 else
6031 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6034 /* Check that we have simplified at least one of the branches. */
6035 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6036 return NULL_TREE;
6038 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6042 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6044 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6045 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6046 ADDEND is the same as X.
6048 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6049 and finite. The problematic cases are when X is zero, and its mode
6050 has signed zeros. In the case of rounding towards -infinity,
6051 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6052 modes, X + 0 is not the same as X because -0 + 0 is 0. */
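/* For example, when signed zeros are honored, X - 0.0 still folds to
X provided rounding does not depend on the sign of zero, but
X + 0.0 does not fold, because (-0.0) + 0.0 is +0.0, not -0.0.  */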
6054 bool
6055 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6057 if (!real_zerop (addend))
6058 return false;
6060 /* Don't allow the fold with -fsignaling-nans. */
6061 if (HONOR_SNANS (TYPE_MODE (type)))
6062 return false;
6064 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6065 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6066 return true;
6068 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6069 if (TREE_CODE (addend) == REAL_CST
6070 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6071 negate = !negate;
6073 /* The mode has signed zeros, and we have to honor their sign.
6074 In this situation, there is only one case we can return true for.
6075 X - 0 is the same as X unless rounding towards -infinity is
6076 supported. */
6077 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6080 /* Subroutine of fold() that checks comparisons of built-in math
6081 functions against real constants.
6083 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6084 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6085 is the type of the result and ARG0 and ARG1 are the operands of the
6086 comparison. ARG1 must be a TREE_REAL_CST.
6088 The function returns the constant folded tree if a simplification
6089 can be made, and NULL_TREE otherwise. */
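/* For example, sqrt(x) > 2.0 folds to x > 4.0 (sqrt(x) and x are NaN
together, so no extra NaN guard is needed), while sqrt(x) < 2.0
folds to x >= 0.0 && x < 4.0 unless NaNs can be ignored.  */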
6091 static tree
6092 fold_mathfn_compare (location_t loc,
6093 enum built_in_function fcode, enum tree_code code,
6094 tree type, tree arg0, tree arg1)
6096 REAL_VALUE_TYPE c;
6098 if (BUILTIN_SQRT_P (fcode))
6100 tree arg = CALL_EXPR_ARG (arg0, 0);
6101 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6103 c = TREE_REAL_CST (arg1);
6104 if (REAL_VALUE_NEGATIVE (c))
6106 /* sqrt(x) < y is always false, if y is negative. */
6107 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6108 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6110 /* sqrt(x) > y is always true, if y is negative and we
6111 don't care about NaNs, i.e. negative values of x. */
6112 if (code == NE_EXPR || !HONOR_NANS (mode))
6113 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6115 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6116 return fold_build2_loc (loc, GE_EXPR, type, arg,
6117 build_real (TREE_TYPE (arg), dconst0));
6119 else if (code == GT_EXPR || code == GE_EXPR)
6121 REAL_VALUE_TYPE c2;
6123 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6124 real_convert (&c2, mode, &c2);
6126 if (REAL_VALUE_ISINF (c2))
6128 /* sqrt(x) > y is x == +Inf, when y is very large. */
6129 if (HONOR_INFINITIES (mode))
6130 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6131 build_real (TREE_TYPE (arg), c2));
6133 /* sqrt(x) > y is always false, when y is very large
6134 and we don't care about infinities. */
6135 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6138 /* sqrt(x) > c is the same as x > c*c. */
6139 return fold_build2_loc (loc, code, type, arg,
6140 build_real (TREE_TYPE (arg), c2));
6142 else if (code == LT_EXPR || code == LE_EXPR)
6144 REAL_VALUE_TYPE c2;
6146 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6147 real_convert (&c2, mode, &c2);
6149 if (REAL_VALUE_ISINF (c2))
6151 /* sqrt(x) < y is always true, when y is a very large
6152 value and we don't care about NaNs or Infinities. */
6153 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6154 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6156 /* sqrt(x) < y is x != +Inf when y is very large and we
6157 don't care about NaNs. */
6158 if (! HONOR_NANS (mode))
6159 return fold_build2_loc (loc, NE_EXPR, type, arg,
6160 build_real (TREE_TYPE (arg), c2));
6162 /* sqrt(x) < y is x >= 0 when y is very large and we
6163 don't care about Infinities. */
6164 if (! HONOR_INFINITIES (mode))
6165 return fold_build2_loc (loc, GE_EXPR, type, arg,
6166 build_real (TREE_TYPE (arg), dconst0));
6168 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6169 arg = save_expr (arg);
6170 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6171 fold_build2_loc (loc, GE_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg),
6173 dconst0)),
6174 fold_build2_loc (loc, NE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg),
6176 c2)));
6179 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6180 if (! HONOR_NANS (mode))
6181 return fold_build2_loc (loc, code, type, arg,
6182 build_real (TREE_TYPE (arg), c2));
6184 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6185 arg = save_expr (arg);
6186 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6187 fold_build2_loc (loc, GE_EXPR, type, arg,
6188 build_real (TREE_TYPE (arg),
6189 dconst0)),
6190 fold_build2_loc (loc, code, type, arg,
6191 build_real (TREE_TYPE (arg),
6192 c2)));
6196 return NULL_TREE;
6199 /* Subroutine of fold() that optimizes comparisons against Infinities,
6200 either +Inf or -Inf.
6202 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6203 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6204 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6206 The function returns the constant folded tree if a simplification
6207 can be made, and NULL_TREE otherwise. */
6209 static tree
6210 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6211 tree arg0, tree arg1)
6213 enum machine_mode mode;
6214 REAL_VALUE_TYPE max;
6215 tree temp;
6216 bool neg;
6218 mode = TYPE_MODE (TREE_TYPE (arg0));
6220 /* For negative infinity swap the sense of the comparison. */
6221 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6222 if (neg)
6223 code = swap_tree_comparison (code);
6225 switch (code)
6227 case GT_EXPR:
6228 /* x > +Inf is always false, if we ignore sNaNs. */
6229 if (HONOR_SNANS (mode))
6230 return NULL_TREE;
6231 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6233 case LE_EXPR:
6234 /* x <= +Inf is always true, if we don't care about NaNs. */
6235 if (! HONOR_NANS (mode))
6236 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6238 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6239 arg0 = save_expr (arg0);
6240 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6242 case EQ_EXPR:
6243 case GE_EXPR:
6244 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6245 real_maxval (&max, neg, mode);
6246 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6247 arg0, build_real (TREE_TYPE (arg0), max));
6249 case LT_EXPR:
6250 /* x < +Inf is always equal to x <= DBL_MAX. */
6251 real_maxval (&max, neg, mode);
6252 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6253 arg0, build_real (TREE_TYPE (arg0), max));
6255 case NE_EXPR:
6256 /* x != +Inf is always equal to !(x > DBL_MAX). */
6257 real_maxval (&max, neg, mode);
6258 if (! HONOR_NANS (mode))
6259 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6262 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6263 arg0, build_real (TREE_TYPE (arg0), max));
6264 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6266 default:
6267 break;
6270 return NULL_TREE;
6273 /* Subroutine of fold() that optimizes comparisons of a division by
6274 a nonzero integer constant against an integer constant, i.e.
6275 X/C1 op C2.
6277 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6278 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6279 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6281 The function returns the constant folded tree if a simplification
6282 can be made, and NULL_TREE otherwise. */
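/* For example, for signed X, X / 4 == 2 folds to the range check
8 <= X && X <= 11: prod = 8, tmp = 3, lo = prod and hi = prod + tmp
in the code below.  */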
6284 static tree
6285 fold_div_compare (location_t loc,
6286 enum tree_code code, tree type, tree arg0, tree arg1)
6288 tree prod, tmp, hi, lo;
6289 tree arg00 = TREE_OPERAND (arg0, 0);
6290 tree arg01 = TREE_OPERAND (arg0, 1);
6291 double_int val;
6292 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6293 bool neg_overflow;
6294 bool overflow;
6296 /* We have to do this the hard way to detect unsigned overflow.
6297 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6298 val = TREE_INT_CST (arg01)
6299 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6300 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6301 neg_overflow = false;
6303 if (unsigned_p)
6305 tmp = int_const_binop (MINUS_EXPR, arg01,
6306 build_int_cst (TREE_TYPE (arg01), 1));
6307 lo = prod;
6309 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6310 val = TREE_INT_CST (prod)
6311 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6312 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6313 -1, overflow | TREE_OVERFLOW (prod));
6315 else if (tree_int_cst_sgn (arg01) >= 0)
6317 tmp = int_const_binop (MINUS_EXPR, arg01,
6318 build_int_cst (TREE_TYPE (arg01), 1));
6319 switch (tree_int_cst_sgn (arg1))
6321 case -1:
6322 neg_overflow = true;
6323 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6324 hi = prod;
6325 break;
6327 case 0:
6328 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6329 hi = tmp;
6330 break;
6332 case 1:
6333 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6334 lo = prod;
6335 break;
6337 default:
6338 gcc_unreachable ();
6341 else
6343 /* A negative divisor reverses the relational operators. */
6344 code = swap_tree_comparison (code);
6346 tmp = int_const_binop (PLUS_EXPR, arg01,
6347 build_int_cst (TREE_TYPE (arg01), 1));
6348 switch (tree_int_cst_sgn (arg1))
6350 case -1:
6351 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6352 lo = prod;
6353 break;
6355 case 0:
6356 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6357 lo = tmp;
6358 break;
6360 case 1:
6361 neg_overflow = true;
6362 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6363 hi = prod;
6364 break;
6366 default:
6367 gcc_unreachable ();
6371 switch (code)
6373 case EQ_EXPR:
6374 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6375 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6376 if (TREE_OVERFLOW (hi))
6377 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6378 if (TREE_OVERFLOW (lo))
6379 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6380 return build_range_check (loc, type, arg00, 1, lo, hi);
6382 case NE_EXPR:
6383 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6384 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6385 if (TREE_OVERFLOW (hi))
6386 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6387 if (TREE_OVERFLOW (lo))
6388 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6389 return build_range_check (loc, type, arg00, 0, lo, hi);
6391 case LT_EXPR:
6392 if (TREE_OVERFLOW (lo))
6394 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6395 return omit_one_operand_loc (loc, type, tmp, arg00);
6397 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6399 case LE_EXPR:
6400 if (TREE_OVERFLOW (hi))
6402 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6403 return omit_one_operand_loc (loc, type, tmp, arg00);
6405 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6407 case GT_EXPR:
6408 if (TREE_OVERFLOW (hi))
6410 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6411 return omit_one_operand_loc (loc, type, tmp, arg00);
6413 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6415 case GE_EXPR:
6416 if (TREE_OVERFLOW (lo))
6418 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6419 return omit_one_operand_loc (loc, type, tmp, arg00);
6421 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6423 default:
6424 break;
6427 return NULL_TREE;
6431 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6432 equality/inequality test, then return a simplified form of the test
6433 using a sign test. Otherwise return NULL. TYPE is the desired
6434 result type. */
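/* For example, if X has a 32-bit type, (X & 0x80000000) != 0 folds to
Y < 0 and (X & 0x80000000) == 0 folds to Y >= 0, where Y is X cast
to the signed variant of its type.  */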
6436 static tree
6437 fold_single_bit_test_into_sign_test (location_t loc,
6438 enum tree_code code, tree arg0, tree arg1,
6439 tree result_type)
6441 /* If this is testing a single bit, we can optimize the test. */
6442 if ((code == NE_EXPR || code == EQ_EXPR)
6443 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6444 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6446 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6447 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6448 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6450 if (arg00 != NULL_TREE
6451 /* This is only a win if casting to a signed type is cheap,
6452 i.e. when arg00's type is not a partial mode. */
6453 && TYPE_PRECISION (TREE_TYPE (arg00))
6454 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6456 tree stype = signed_type_for (TREE_TYPE (arg00));
6457 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6458 result_type,
6459 fold_convert_loc (loc, stype, arg00),
6460 build_int_cst (stype, 0));
6464 return NULL_TREE;
6467 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6468 equality/inequality test, then return a simplified form of
6469 the test using shifts and logical operations. Otherwise return
6470 NULL. TYPE is the desired result type. */
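/* For example, (X & 8) != 0 folds to (X >> 3) & 1 and (X & 8) == 0
folds to ((X >> 3) ^ 1) & 1, computed in an unsigned (or, on targets
with sign-extending loads, signed) intermediate type.  */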
6472 tree
6473 fold_single_bit_test (location_t loc, enum tree_code code,
6474 tree arg0, tree arg1, tree result_type)
6476 /* If this is testing a single bit, we can optimize the test. */
6477 if ((code == NE_EXPR || code == EQ_EXPR)
6478 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6479 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6481 tree inner = TREE_OPERAND (arg0, 0);
6482 tree type = TREE_TYPE (arg0);
6483 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6484 enum machine_mode operand_mode = TYPE_MODE (type);
6485 int ops_unsigned;
6486 tree signed_type, unsigned_type, intermediate_type;
6487 tree tem, one;
6489 /* First, see if we can fold the single bit test into a sign-bit
6490 test. */
6491 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6492 result_type);
6493 if (tem)
6494 return tem;
6496 /* Otherwise we have (A & C) != 0 where C is a single bit,
6497 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6498 Similarly for (A & C) == 0. */
6500 /* If INNER is a right shift of a constant and it plus BITNUM does
6501 not overflow, adjust BITNUM and INNER. */
6502 if (TREE_CODE (inner) == RSHIFT_EXPR
6503 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6504 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6505 && bitnum < TYPE_PRECISION (type)
6506 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6507 bitnum - TYPE_PRECISION (type)))
6509 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6510 inner = TREE_OPERAND (inner, 0);
6513 /* If we are going to be able to omit the AND below, we must do our
6514 operations as unsigned. If we must use the AND, we have a choice.
6515 Normally unsigned is faster, but for some machines signed is. */
6516 #ifdef LOAD_EXTEND_OP
6517 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6518 && !flag_syntax_only) ? 0 : 1;
6519 #else
6520 ops_unsigned = 1;
6521 #endif
6523 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6524 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6525 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6526 inner = fold_convert_loc (loc, intermediate_type, inner);
6528 if (bitnum != 0)
6529 inner = build2 (RSHIFT_EXPR, intermediate_type,
6530 inner, size_int (bitnum));
6532 one = build_int_cst (intermediate_type, 1);
6534 if (code == EQ_EXPR)
6535 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6537 /* Put the AND last so it can combine with more things. */
6538 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6540 /* Make sure to return the proper type. */
6541 inner = fold_convert_loc (loc, result_type, inner);
6543 return inner;
6545 return NULL_TREE;
6548 /* Check whether we are allowed to reorder operands arg0 and arg1,
6549 such that the evaluation of arg1 occurs before arg0. */
6551 static bool
6552 reorder_operands_p (const_tree arg0, const_tree arg1)
6554 if (! flag_evaluation_order)
6555 return true;
6556 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6557 return true;
6558 return ! TREE_SIDE_EFFECTS (arg0)
6559 && ! TREE_SIDE_EFFECTS (arg1);
6562 /* Test whether it is preferable to swap two operands, ARG0 and
6563 ARG1, for example because ARG0 is an integer constant and ARG1
6564 isn't. If REORDER is true, only recommend swapping if we can
6565 evaluate the operands in reverse order. */
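/* For example, this returns true for the pair (5, X), which is how
fold canonicalizes 5 + X as X + 5; likewise two SSA_NAMEs are
ordered by SSA_NAME_VERSION so equivalent expressions get a single
canonical form.  */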
6567 bool
6568 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6570 STRIP_SIGN_NOPS (arg0);
6571 STRIP_SIGN_NOPS (arg1);
6573 if (TREE_CODE (arg1) == INTEGER_CST)
6574 return 0;
6575 if (TREE_CODE (arg0) == INTEGER_CST)
6576 return 1;
6578 if (TREE_CODE (arg1) == REAL_CST)
6579 return 0;
6580 if (TREE_CODE (arg0) == REAL_CST)
6581 return 1;
6583 if (TREE_CODE (arg1) == FIXED_CST)
6584 return 0;
6585 if (TREE_CODE (arg0) == FIXED_CST)
6586 return 1;
6588 if (TREE_CODE (arg1) == COMPLEX_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == COMPLEX_CST)
6591 return 1;
6593 if (TREE_CONSTANT (arg1))
6594 return 0;
6595 if (TREE_CONSTANT (arg0))
6596 return 1;
6598 if (optimize_function_for_size_p (cfun))
6599 return 0;
6601 if (reorder && flag_evaluation_order
6602 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6603 return 0;
6605 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6606 for commutative and comparison operators. Ensuring a canonical
6607 form allows the optimizers to find additional redundancies without
6608 having to explicitly check for both orderings. */
6609 if (TREE_CODE (arg0) == SSA_NAME
6610 && TREE_CODE (arg1) == SSA_NAME
6611 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6612 return 1;
6614 /* Put SSA_NAMEs last. */
6615 if (TREE_CODE (arg1) == SSA_NAME)
6616 return 0;
6617 if (TREE_CODE (arg0) == SSA_NAME)
6618 return 1;
6620 /* Put variables last. */
6621 if (DECL_P (arg1))
6622 return 0;
6623 if (DECL_P (arg0))
6624 return 1;
6626 return 0;
6629 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6630 ARG0 is extended to a wider type. */
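/* For example, with unsigned char C, (int) C == 300 folds to 0
because 300 does not fit in the narrower type, while (int) C == 100
becomes C == 100 evaluated in the narrower type.  */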
6632 static tree
6633 fold_widened_comparison (location_t loc, enum tree_code code,
6634 tree type, tree arg0, tree arg1)
6636 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6637 tree arg1_unw;
6638 tree shorter_type, outer_type;
6639 tree min, max;
6640 bool above, below;
6642 if (arg0_unw == arg0)
6643 return NULL_TREE;
6644 shorter_type = TREE_TYPE (arg0_unw);
6646 #ifdef HAVE_canonicalize_funcptr_for_compare
6647 /* Disable this optimization if we're casting a function pointer
6648 type on targets that require function pointer canonicalization. */
6649 if (HAVE_canonicalize_funcptr_for_compare
6650 && TREE_CODE (shorter_type) == POINTER_TYPE
6651 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6652 return NULL_TREE;
6653 #endif
6655 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6656 return NULL_TREE;
6658 arg1_unw = get_unwidened (arg1, NULL_TREE);
6660 /* If possible, express the comparison in the shorter mode. */
6661 if ((code == EQ_EXPR || code == NE_EXPR
6662 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6663 && (TREE_TYPE (arg1_unw) == shorter_type
6664 || ((TYPE_PRECISION (shorter_type)
6665 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6666 && (TYPE_UNSIGNED (shorter_type)
6667 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6668 || (TREE_CODE (arg1_unw) == INTEGER_CST
6669 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6670 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6671 && int_fits_type_p (arg1_unw, shorter_type))))
6672 return fold_build2_loc (loc, code, type, arg0_unw,
6673 fold_convert_loc (loc, shorter_type, arg1_unw));
6675 if (TREE_CODE (arg1_unw) != INTEGER_CST
6676 || TREE_CODE (shorter_type) != INTEGER_TYPE
6677 || !int_fits_type_p (arg1_unw, shorter_type))
6678 return NULL_TREE;
6680 /* If we are comparing with an integer that does not fit into the range
6681 of the shorter type, the result is known. */
6682 outer_type = TREE_TYPE (arg1_unw);
6683 min = lower_bound_in_type (outer_type, shorter_type);
6684 max = upper_bound_in_type (outer_type, shorter_type);
6686 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6687 max, arg1_unw));
6688 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6689 arg1_unw, min));
6691 switch (code)
6693 case EQ_EXPR:
6694 if (above || below)
6695 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6696 break;
6698 case NE_EXPR:
6699 if (above || below)
6700 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6701 break;
6703 case LT_EXPR:
6704 case LE_EXPR:
6705 if (above)
6706 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6707 else if (below)
6708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6710 case GT_EXPR:
6711 case GE_EXPR:
6712 if (above)
6713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6714 else if (below)
6715 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6717 default:
6718 break;
6721 return NULL_TREE;
6724 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6725 ARG0 just the signedness is changed. */
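/* For example, with int I, (unsigned) I == 5 folds to I == 5; the
relational codes are refused when the signedness differs, since
(unsigned) I < 5 and I < 5 disagree for negative I.  */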
6727 static tree
6728 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6729 tree arg0, tree arg1)
6731 tree arg0_inner;
6732 tree inner_type, outer_type;
6734 if (!CONVERT_EXPR_P (arg0))
6735 return NULL_TREE;
6737 outer_type = TREE_TYPE (arg0);
6738 arg0_inner = TREE_OPERAND (arg0, 0);
6739 inner_type = TREE_TYPE (arg0_inner);
6741 #ifdef HAVE_canonicalize_funcptr_for_compare
6742 /* Disable this optimization if we're casting a function pointer
6743 type on targets that require function pointer canonicalization. */
6744 if (HAVE_canonicalize_funcptr_for_compare
6745 && TREE_CODE (inner_type) == POINTER_TYPE
6746 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6747 return NULL_TREE;
6748 #endif
6750 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6751 return NULL_TREE;
6753 if (TREE_CODE (arg1) != INTEGER_CST
6754 && !(CONVERT_EXPR_P (arg1)
6755 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6756 return NULL_TREE;
6758 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6759 && code != NE_EXPR
6760 && code != EQ_EXPR)
6761 return NULL_TREE;
6763 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6764 return NULL_TREE;
6766 if (TREE_CODE (arg1) == INTEGER_CST)
6767 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6768 0, TREE_OVERFLOW (arg1));
6769 else
6770 arg1 = fold_convert_loc (loc, inner_type, arg1);
6772 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6775 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6776 the step of the array. Reconstructs s and delta in the case of s *
6777 delta being an integer constant (and thus already folded). ADDR is
6778 the address. OP1 is the multiplicative expression. If the
6779 function succeeds, the new address expression is returned.
6780 Otherwise NULL_TREE is returned. LOC is the location of the
6781 resulting expression. */
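/* For example, with int A[10] (element size 4), &A[1] p+ 4 * I
becomes &A[1 + I], and &A[1] p+ 8 becomes &A[3].  */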
6783 static tree
6784 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6786 tree s, delta, step;
6787 tree ref = TREE_OPERAND (addr, 0), pref;
6788 tree ret, pos;
6789 tree itype;
6790 bool mdim = false;
6792 /* Strip the nops that might be added when converting op1 to sizetype. */
6793 STRIP_NOPS (op1);
6795 /* Canonicalize op1 into a possibly non-constant delta
6796 and an INTEGER_CST s. */
6797 if (TREE_CODE (op1) == MULT_EXPR)
6799 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6801 STRIP_NOPS (arg0);
6802 STRIP_NOPS (arg1);
6804 if (TREE_CODE (arg0) == INTEGER_CST)
6806 s = arg0;
6807 delta = arg1;
6809 else if (TREE_CODE (arg1) == INTEGER_CST)
6811 s = arg1;
6812 delta = arg0;
6814 else
6815 return NULL_TREE;
6817 else if (TREE_CODE (op1) == INTEGER_CST)
6819 delta = op1;
6820 s = NULL_TREE;
6822 else
6824 /* Treat op1 as delta * 1. */
6825 delta = op1;
6826 s = integer_one_node;
6829 /* Handle &x.array the same as we would handle &x.array[0]. */
6830 if (TREE_CODE (ref) == COMPONENT_REF
6831 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6833 tree domain;
6835 /* Remember if this was a multi-dimensional array. */
6836 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6837 mdim = true;
6839 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6840 if (! domain)
6841 goto cont;
6842 itype = TREE_TYPE (domain);
6844 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6845 if (TREE_CODE (step) != INTEGER_CST)
6846 goto cont;
6848 if (s)
6850 if (! tree_int_cst_equal (step, s))
6851 goto cont;
6853 else
6855 /* Check whether delta is a multiple of step. */
6856 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6857 if (! tmp)
6858 goto cont;
6859 delta = tmp;
6862 /* Only fold here if we can verify we do not overflow one
6863 dimension of a multi-dimensional array. */
6864 if (mdim)
6866 tree tmp;
6868 if (!TYPE_MIN_VALUE (domain)
6869 || !TYPE_MAX_VALUE (domain)
6870 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6871 goto cont;
6873 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6874 fold_convert_loc (loc, itype,
6875 TYPE_MIN_VALUE (domain)),
6876 fold_convert_loc (loc, itype, delta));
6877 if (TREE_CODE (tmp) != INTEGER_CST
6878 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6879 goto cont;
6882 /* We found a suitable component reference. */
6884 pref = TREE_OPERAND (addr, 0);
6885 ret = copy_node (pref);
6886 SET_EXPR_LOCATION (ret, loc);
6888 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6889 fold_build2_loc
6890 (loc, PLUS_EXPR, itype,
6891 fold_convert_loc (loc, itype,
6892 TYPE_MIN_VALUE
6893 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6894 fold_convert_loc (loc, itype, delta)),
6895 NULL_TREE, NULL_TREE);
6896 return build_fold_addr_expr_loc (loc, ret);
6899 cont:
6901 for (;; ref = TREE_OPERAND (ref, 0))
6903 if (TREE_CODE (ref) == ARRAY_REF)
6905 tree domain;
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6909 mdim = true;
6911 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6912 if (! domain)
6913 continue;
6914 itype = TREE_TYPE (domain);
6916 step = array_ref_element_size (ref);
6917 if (TREE_CODE (step) != INTEGER_CST)
6918 continue;
6920 if (s)
6922 if (! tree_int_cst_equal (step, s))
6923 continue;
6925 else
6927 /* Check whether delta is a multiple of step. */
6928 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6929 if (! tmp)
6930 continue;
6931 delta = tmp;
6934 /* Only fold here if we can verify we do not overflow one
6935 dimension of a multi-dimensional array. */
6936 if (mdim)
6938 tree tmp;
6940 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6941 || !TYPE_MAX_VALUE (domain)
6942 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6943 continue;
6945 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6946 fold_convert_loc (loc, itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert_loc (loc, itype, delta));
6949 if (!tmp
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6952 continue;
6955 break;
6957 else
6958 mdim = false;
6960 if (!handled_component_p (ref))
6961 return NULL_TREE;
6964 /* We found the suitable array reference. So copy everything up to it,
6965 and replace the index. */
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 SET_EXPR_LOCATION (ret, loc);
6970 pos = ret;
6972 while (pref != ref)
6974 pref = TREE_OPERAND (pref, 0);
6975 TREE_OPERAND (pos, 0) = copy_node (pref);
6976 pos = TREE_OPERAND (pos, 0);
6979 TREE_OPERAND (pos, 1)
6980 = fold_build2_loc (loc, PLUS_EXPR, itype,
6981 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6982 fold_convert_loc (loc, itype, delta));
6983 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991 static tree
6992 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7000 else
7001 return NULL_TREE;
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7006 return NULL_TREE;
7008 if (TREE_CODE (ineq) == LT_EXPR)
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7018 else
7019 return NULL_TREE;
7021 if (TREE_TYPE (a1) != typea)
7022 return NULL_TREE;
7024 if (POINTER_TYPE_P (typea))
7026 /* Convert the pointer types into integer before taking the difference. */
7027 tree ta = fold_convert_loc (loc, ssizetype, a);
7028 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7029 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7031 else
7032 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7034 if (!diff || !integer_onep (diff))
7035 return NULL_TREE;
7037 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7040 /* Fold a sum or difference of at least one multiplication.
7041 Returns the folded tree or NULL if no simplification could be made. */
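/* For example, X * 3 + X becomes X * 4 via (A * C) + A -> A * (C+1),
and I * 12 + J * 4, which has no identical multiplicand, becomes
(I * 3 + J) * 4 via the common power-of-two factor 4.  */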
7043 static tree
7044 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7045 tree arg0, tree arg1)
7047 tree arg00, arg01, arg10, arg11;
7048 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7050 /* (A * C) +- (B * C) -> (A+-B) * C.
7051 (A * C) +- A -> A * (C+-1).
7052 We are most concerned about the case where C is a constant,
7053 but other combinations show up during loop reduction. Since
7054 it is not difficult, try all four possibilities. */
7056 if (TREE_CODE (arg0) == MULT_EXPR)
7058 arg00 = TREE_OPERAND (arg0, 0);
7059 arg01 = TREE_OPERAND (arg0, 1);
7061 else if (TREE_CODE (arg0) == INTEGER_CST)
7063 arg00 = build_one_cst (type);
7064 arg01 = arg0;
7066 else
7068 /* We cannot generate constant 1 for fract. */
7069 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7070 return NULL_TREE;
7071 arg00 = arg0;
7072 arg01 = build_one_cst (type);
7074 if (TREE_CODE (arg1) == MULT_EXPR)
7076 arg10 = TREE_OPERAND (arg1, 0);
7077 arg11 = TREE_OPERAND (arg1, 1);
7079 else if (TREE_CODE (arg1) == INTEGER_CST)
7081 arg10 = build_one_cst (type);
7082 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7083 the purpose of this canonicalization. */
7084 if (TREE_INT_CST_HIGH (arg1) == -1
7085 && negate_expr_p (arg1)
7086 && code == PLUS_EXPR)
7088 arg11 = negate_expr (arg1);
7089 code = MINUS_EXPR;
7091 else
7092 arg11 = arg1;
7094 else
7096 /* We cannot generate constant 1 for fract. */
7097 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7098 return NULL_TREE;
7099 arg10 = arg1;
7100 arg11 = build_one_cst (type);
7102 same = NULL_TREE;
7104 if (operand_equal_p (arg01, arg11, 0))
7105 same = arg01, alt0 = arg00, alt1 = arg10;
7106 else if (operand_equal_p (arg00, arg10, 0))
7107 same = arg00, alt0 = arg01, alt1 = arg11;
7108 else if (operand_equal_p (arg00, arg11, 0))
7109 same = arg00, alt0 = arg01, alt1 = arg10;
7110 else if (operand_equal_p (arg01, arg10, 0))
7111 same = arg01, alt0 = arg00, alt1 = arg11;
7113 /* No identical multiplicands; see if we can find a common
7114 power-of-two factor in non-power-of-two multiplies. This
7115 can help in multi-dimensional array access. */
7116 else if (host_integerp (arg01, 0)
7117 && host_integerp (arg11, 0))
7119 HOST_WIDE_INT int01, int11, tmp;
7120 bool swap = false;
7121 tree maybe_same;
7122 int01 = TREE_INT_CST_LOW (arg01);
7123 int11 = TREE_INT_CST_LOW (arg11);
7125 /* Move min of absolute values to int11. */
7126 if (absu_hwi (int01) < absu_hwi (int11))
7128 tmp = int01, int01 = int11, int11 = tmp;
7129 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7130 maybe_same = arg01;
7131 swap = true;
7133 else
7134 maybe_same = arg11;
7136 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7137 /* The remainder should not be a constant, otherwise we
7138 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7139 increased the number of multiplications necessary. */
7140 && TREE_CODE (arg10) != INTEGER_CST)
7142 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7143 build_int_cst (TREE_TYPE (arg00),
7144 int01 / int11));
7145 alt1 = arg10;
7146 same = maybe_same;
7147 if (swap)
7148 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7152 if (same)
7153 return fold_build2_loc (loc, MULT_EXPR, type,
7154 fold_build2_loc (loc, code, type,
7155 fold_convert_loc (loc, type, alt0),
7156 fold_convert_loc (loc, type, alt1)),
7157 fold_convert_loc (loc, type, same));
7159 return NULL_TREE;
7162 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7163 specified by EXPR into the buffer PTR of length LEN bytes.
7164 Return the number of bytes placed in the buffer, or zero
7165 upon failure. */
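/* For example, a 32-bit INTEGER_CST 0x11223344 is encoded for a
little-endian target as the bytes 44 33 22 11 and for a big-endian
target as 11 22 33 44 (assuming a word size of at least 4 bytes).  */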
7167 static int
7168 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7170 tree type = TREE_TYPE (expr);
7171 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7172 int byte, offset, word, words;
7173 unsigned char value;
7175 if (total_bytes > len)
7176 return 0;
7177 words = total_bytes / UNITS_PER_WORD;
7179 for (byte = 0; byte < total_bytes; byte++)
7181 int bitpos = byte * BITS_PER_UNIT;
7182 if (bitpos < HOST_BITS_PER_WIDE_INT)
7183 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7184 else
7185 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7186 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7188 if (total_bytes > UNITS_PER_WORD)
7190 word = byte / UNITS_PER_WORD;
7191 if (WORDS_BIG_ENDIAN)
7192 word = (words - 1) - word;
7193 offset = word * UNITS_PER_WORD;
7194 if (BYTES_BIG_ENDIAN)
7195 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7196 else
7197 offset += byte % UNITS_PER_WORD;
7199 else
7200 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7201 ptr[offset] = value;
7203 return total_bytes;
7207 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7208 specified by EXPR into the buffer PTR of length LEN bytes.
7209 Return the number of bytes placed in the buffer, or zero
7210 upon failure. */
7212 static int
7213 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7215 tree type = TREE_TYPE (expr);
7216 enum machine_mode mode = TYPE_MODE (type);
7217 int total_bytes = GET_MODE_SIZE (mode);
7218 FIXED_VALUE_TYPE value;
7219 tree i_value, i_type;
7221 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7222 return 0;
7224 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7226 if (NULL_TREE == i_type
7227 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7228 return 0;
7230 value = TREE_FIXED_CST (expr);
7231 i_value = double_int_to_tree (i_type, value.data);
7233 return native_encode_int (i_value, ptr, len);
7237 /* Subroutine of native_encode_expr. Encode the REAL_CST
7238 specified by EXPR into the buffer PTR of length LEN bytes.
7239 Return the number of bytes placed in the buffer, or zero
7240 upon failure. */
7242 static int
7243 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7245 tree type = TREE_TYPE (expr);
7246 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7247 int byte, offset, word, words, bitpos;
7248 unsigned char value;
7250 /* There are always 32 bits in each long, no matter the size of
7251 the host's long. We handle floating-point representations with
7252 up to 192 bits. */
7253 long tmp[6];
7255 if (total_bytes > len)
7256 return 0;
7257 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7259 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7261 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7262 bitpos += BITS_PER_UNIT)
7264 byte = (bitpos / BITS_PER_UNIT) & 3;
7265 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7267 if (UNITS_PER_WORD < 4)
7269 word = byte / UNITS_PER_WORD;
7270 if (WORDS_BIG_ENDIAN)
7271 word = (words - 1) - word;
7272 offset = word * UNITS_PER_WORD;
7273 if (BYTES_BIG_ENDIAN)
7274 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7275 else
7276 offset += byte % UNITS_PER_WORD;
7278 else
7279 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7280 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7282 return total_bytes;
7285 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7286 specified by EXPR into the buffer PTR of length LEN bytes.
7287 Return the number of bytes placed in the buffer, or zero
7288 upon failure. */
7290 static int
7291 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7293 int rsize, isize;
7294 tree part;
7296 part = TREE_REALPART (expr);
7297 rsize = native_encode_expr (part, ptr, len);
7298 if (rsize == 0)
7299 return 0;
7300 part = TREE_IMAGPART (expr);
7301 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7302 if (isize != rsize)
7303 return 0;
7304 return rsize + isize;
7308 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7309 specified by EXPR into the buffer PTR of length LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero
7311 upon failure. */
7313 static int
7314 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7316 unsigned i, count;
7317 int size, offset;
7318 tree itype, elem;
7320 offset = 0;
7321 count = VECTOR_CST_NELTS (expr);
7322 itype = TREE_TYPE (TREE_TYPE (expr));
7323 size = GET_MODE_SIZE (TYPE_MODE (itype));
7324 for (i = 0; i < count; i++)
7326 elem = VECTOR_CST_ELT (expr, i);
7327 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7328 return 0;
7329 offset += size;
7331 return offset;
7335 /* Subroutine of native_encode_expr. Encode the STRING_CST
7336 specified by EXPR into the buffer PTR of length LEN bytes.
7337 Return the number of bytes placed in the buffer, or zero
7338 upon failure. */
7340 static int
7341 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7343 tree type = TREE_TYPE (expr);
7344 HOST_WIDE_INT total_bytes;
7346 if (TREE_CODE (type) != ARRAY_TYPE
7347 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7348 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7349 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7350 return 0;
7351 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7352 if (total_bytes > len)
7353 return 0;
7354 if (TREE_STRING_LENGTH (expr) < total_bytes)
7356 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7357 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7358 total_bytes - TREE_STRING_LENGTH (expr));
7360 else
7361 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7362 return total_bytes;
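/* Illustrative sketch, not part of the original file: a hypothetical
   check of the zero-padding path above; building the STRING_CST and
   its char[4] type by hand is done purely for the example.  */

static void
check_native_encode_string_padding (void)
{
  unsigned char buf[4];
  tree index = build_index_type (size_int (3));
  tree atype = build_array_type (char_type_node, index);
  tree str = build_string (2, "ab");

  TREE_TYPE (str) = atype;
  /* The string is shorter than its array type, so the two trailing
     bytes must come back zero-filled.  */
  if (native_encode_string (str, buf, sizeof buf) == 4)
    gcc_assert (buf[0] == 'a' && buf[1] == 'b'
                && buf[2] == 0 && buf[3] == 0);
}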
7366 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7367 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7368 buffer PTR of length LEN bytes. Return the number of bytes
7369 placed in the buffer, or zero upon failure. */
7371 int
7372 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7374 switch (TREE_CODE (expr))
7376 case INTEGER_CST:
7377 return native_encode_int (expr, ptr, len);
7379 case REAL_CST:
7380 return native_encode_real (expr, ptr, len);
7382 case FIXED_CST:
7383 return native_encode_fixed (expr, ptr, len);
7385 case COMPLEX_CST:
7386 return native_encode_complex (expr, ptr, len);
7388 case VECTOR_CST:
7389 return native_encode_vector (expr, ptr, len);
7391 case STRING_CST:
7392 return native_encode_string (expr, ptr, len);
7394 default:
7395 return 0;
7400 /* Subroutine of native_interpret_expr. Interpret the contents of
7401 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7402 If the buffer cannot be interpreted, return NULL_TREE. */
7404 static tree
7405 native_interpret_int (tree type, const unsigned char *ptr, int len)
7407 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7408 double_int result;
7410 if (total_bytes > len
7411 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7412 return NULL_TREE;
7414 result = double_int::from_buffer (ptr, total_bytes);
7416 return double_int_to_tree (type, result);
7420 /* Subroutine of native_interpret_expr. Interpret the contents of
7421 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7422 If the buffer cannot be interpreted, return NULL_TREE. */
7424 static tree
7425 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7427 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7428 double_int result;
7429 FIXED_VALUE_TYPE fixed_value;
7431 if (total_bytes > len
7432 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7433 return NULL_TREE;
7435 result = double_int::from_buffer (ptr, total_bytes);
7436 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7438 return build_fixed (type, fixed_value);
7442 /* Subroutine of native_interpret_expr. Interpret the contents of
7443 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7444 If the buffer cannot be interpreted, return NULL_TREE. */
7446 static tree
7447 native_interpret_real (tree type, const unsigned char *ptr, int len)
7449 enum machine_mode mode = TYPE_MODE (type);
7450 int total_bytes = GET_MODE_SIZE (mode);
7451 int byte, offset, word, words, bitpos;
7452 unsigned char value;
7453 /* There are always 32 bits in each long, no matter the size of
7454 the host's long. We handle floating-point representations with
7455 up to 192 bits. */
7456 REAL_VALUE_TYPE r;
7457 long tmp[6];
7459 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7460 if (total_bytes > len || total_bytes > 24)
7461 return NULL_TREE;
7462 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7464 memset (tmp, 0, sizeof (tmp));
7465 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7466 bitpos += BITS_PER_UNIT)
7468 byte = (bitpos / BITS_PER_UNIT) & 3;
7469 if (UNITS_PER_WORD < 4)
7471 word = byte / UNITS_PER_WORD;
7472 if (WORDS_BIG_ENDIAN)
7473 word = (words - 1) - word;
7474 offset = word * UNITS_PER_WORD;
7475 if (BYTES_BIG_ENDIAN)
7476 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7477 else
7478 offset += byte % UNITS_PER_WORD;
7480 else
7481 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7482 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7484 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7487 real_from_target (&r, tmp, mode);
7488 return build_real (type, r);
7492 /* Subroutine of native_interpret_expr. Interpret the contents of
7493 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7494 If the buffer cannot be interpreted, return NULL_TREE. */
7496 static tree
7497 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7499 tree etype, rpart, ipart;
7500 int size;
7502 etype = TREE_TYPE (type);
7503 size = GET_MODE_SIZE (TYPE_MODE (etype));
7504 if (size * 2 > len)
7505 return NULL_TREE;
7506 rpart = native_interpret_expr (etype, ptr, size);
7507 if (!rpart)
7508 return NULL_TREE;
7509 ipart = native_interpret_expr (etype, ptr+size, size);
7510 if (!ipart)
7511 return NULL_TREE;
7512 return build_complex (type, rpart, ipart);
7516 /* Subroutine of native_interpret_expr. Interpret the contents of
7517 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7518 If the buffer cannot be interpreted, return NULL_TREE. */
7520 static tree
7521 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7523 tree etype, elem;
7524 int i, size, count;
7525 tree *elements;
7527 etype = TREE_TYPE (type);
7528 size = GET_MODE_SIZE (TYPE_MODE (etype));
7529 count = TYPE_VECTOR_SUBPARTS (type);
7530 if (size * count > len)
7531 return NULL_TREE;
7533 elements = XALLOCAVEC (tree, count);
7534 for (i = count - 1; i >= 0; i--)
7536 elem = native_interpret_expr (etype, ptr+(i*size), size);
7537 if (!elem)
7538 return NULL_TREE;
7539 elements[i] = elem;
7541 return build_vector (type, elements);
7545 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7546 the buffer PTR of length LEN as a constant of type TYPE. For
7547 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7548 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7549 return NULL_TREE. */
7551 tree
7552 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7554 switch (TREE_CODE (type))
7556 case INTEGER_TYPE:
7557 case ENUMERAL_TYPE:
7558 case BOOLEAN_TYPE:
7559 case POINTER_TYPE:
7560 case REFERENCE_TYPE:
7561 return native_interpret_int (type, ptr, len);
7563 case REAL_TYPE:
7564 return native_interpret_real (type, ptr, len);
7566 case FIXED_POINT_TYPE:
7567 return native_interpret_fixed (type, ptr, len);
7569 case COMPLEX_TYPE:
7570 return native_interpret_complex (type, ptr, len);
7572 case VECTOR_TYPE:
7573 return native_interpret_vector (type, ptr, len);
7575 default:
7576 return NULL_TREE;
7580 /* Returns true if we can interpret the contents of a native encoding
7581 as TYPE. */
7583 static bool
7584 can_native_interpret_type_p (tree type)
7586 switch (TREE_CODE (type))
7588 case INTEGER_TYPE:
7589 case ENUMERAL_TYPE:
7590 case BOOLEAN_TYPE:
7591 case POINTER_TYPE:
7592 case REFERENCE_TYPE:
7593 case FIXED_POINT_TYPE:
7594 case REAL_TYPE:
7595 case COMPLEX_TYPE:
7596 case VECTOR_TYPE:
7597 return true;
7598 default:
7599 return false;
7603 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7604 TYPE at compile-time. If we're unable to perform the conversion
7605 return NULL_TREE. */
7607 static tree
7608 fold_view_convert_expr (tree type, tree expr)
7610 /* We support up to 512-bit values (for V8DFmode). */
7611 unsigned char buffer[64];
7612 int len;
7614 /* Check that the host and target are sane. */
7615 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7616 return NULL_TREE;
7618 len = native_encode_expr (expr, buffer, sizeof (buffer));
7619 if (len == 0)
7620 return NULL_TREE;
7622 return native_interpret_expr (type, buffer, len);
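/* Illustrative sketch, not part of the original file: a hypothetical
   use of fold_view_convert_expr, assuming the target has an IEEE
   single-precision float and a 32-bit int.  */

static void
check_view_convert_float_bits (void)
{
  tree f = build_real (float_type_node, dconst1);
  tree i = fold_view_convert_expr (integer_type_node, f);

  /* 1.0f reinterpreted as an int is its IEEE-754 bit pattern.  */
  if (i != NULL_TREE)
    gcc_assert (TREE_INT_CST_LOW (i) == 0x3f800000);
}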
7625 /* Build an expression for the address of T. Folds away INDIRECT_REF
7626 to avoid confusing the gimplify process. */
7628 tree
7629 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7631 /* The size of the object is not relevant when talking about its address. */
7632 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7633 t = TREE_OPERAND (t, 0);
7635 if (TREE_CODE (t) == INDIRECT_REF)
7637 t = TREE_OPERAND (t, 0);
7639 if (TREE_TYPE (t) != ptrtype)
7640 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7642 else if (TREE_CODE (t) == MEM_REF
7643 && integer_zerop (TREE_OPERAND (t, 1)))
7644 return TREE_OPERAND (t, 0);
7645 else if (TREE_CODE (t) == MEM_REF
7646 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7647 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7648 TREE_OPERAND (t, 0),
7649 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7650 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7652 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7654 if (TREE_TYPE (t) != ptrtype)
7655 t = fold_convert_loc (loc, ptrtype, t);
7657 else
7658 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7660 return t;
7663 /* Build an expression for the address of T. */
7665 tree
7666 build_fold_addr_expr_loc (location_t loc, tree t)
7668 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7670 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
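/* Illustrative sketch, not part of the original file: a hypothetical
   helper showing the INDIRECT_REF short-circuit above in action.  */

static tree
fold_addr_of_deref_example (location_t loc, tree ptr)
{
  /* Build &*PTR.  Rather than wrapping an ADDR_EXPR around the
     dereference, build_fold_addr_expr_loc hands back PTR itself,
     converted to the requested pointer type if necessary.  */
  tree deref = build_fold_indirect_ref_loc (loc, ptr);
  return build_fold_addr_expr_loc (loc, deref);
}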
7673 static bool vec_cst_ctor_to_array (tree, tree *);
7675 /* Fold a unary expression of code CODE and type TYPE with operand
7676 OP0. Return the folded expression if folding is successful.
7677 Otherwise, return NULL_TREE. */
7679 tree
7680 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7682 tree tem;
7683 tree arg0;
7684 enum tree_code_class kind = TREE_CODE_CLASS (code);
7686 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7687 && TREE_CODE_LENGTH (code) == 1);
7689 arg0 = op0;
7690 if (arg0)
7692 if (CONVERT_EXPR_CODE_P (code)
7693 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7695 /* Don't use STRIP_NOPS, because signedness of argument type
7696 matters. */
7697 STRIP_SIGN_NOPS (arg0);
7699 else
7701 /* Strip any conversions that don't change the mode. This
7702 is safe for every expression, except for a comparison
7703 expression because its signedness is derived from its
7704 operands.
7706 Note that this is done as an internal manipulation within
7707 the constant folder, in order to find the simplest
7708 representation of the arguments so that their form can be
7709 studied. In any case, the appropriate type conversions
7710 should be put back in the tree that will get out of the
7711 constant folder. */
7712 STRIP_NOPS (arg0);
7716 if (TREE_CODE_CLASS (code) == tcc_unary)
7718 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7719 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7720 fold_build1_loc (loc, code, type,
7721 fold_convert_loc (loc, TREE_TYPE (op0),
7722 TREE_OPERAND (arg0, 1))));
7723 else if (TREE_CODE (arg0) == COND_EXPR)
7725 tree arg01 = TREE_OPERAND (arg0, 1);
7726 tree arg02 = TREE_OPERAND (arg0, 2);
7727 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7728 arg01 = fold_build1_loc (loc, code, type,
7729 fold_convert_loc (loc,
7730 TREE_TYPE (op0), arg01));
7731 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7732 arg02 = fold_build1_loc (loc, code, type,
7733 fold_convert_loc (loc,
7734 TREE_TYPE (op0), arg02));
7735 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7736 arg01, arg02);
7738 /* If this was a conversion, and all we did was to move it
7739 inside the COND_EXPR, bring it back out. But leave it if
7740 it is a conversion from integer to integer and the
7741 result precision is no wider than a word since such a
7742 conversion is cheap and may be optimized away by combine,
7743 while it couldn't if it were outside the COND_EXPR. Then return
7744 so we don't get into an infinite recursion loop taking the
7745 conversion out and then back in. */
7747 if ((CONVERT_EXPR_CODE_P (code)
7748 || code == NON_LVALUE_EXPR)
7749 && TREE_CODE (tem) == COND_EXPR
7750 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7751 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7752 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7753 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7754 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7755 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7756 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7757 && (INTEGRAL_TYPE_P
7758 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7759 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7760 || flag_syntax_only))
7761 tem = build1_loc (loc, code, type,
7762 build3 (COND_EXPR,
7763 TREE_TYPE (TREE_OPERAND
7764 (TREE_OPERAND (tem, 1), 0)),
7765 TREE_OPERAND (tem, 0),
7766 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7767 TREE_OPERAND (TREE_OPERAND (tem, 2),
7768 0)));
7769 return tem;
7773 switch (code)
7775 case PAREN_EXPR:
7776 /* Re-association barriers around constants and other re-association
7777 barriers can be removed. */
7778 if (CONSTANT_CLASS_P (op0)
7779 || TREE_CODE (op0) == PAREN_EXPR)
7780 return fold_convert_loc (loc, type, op0);
7781 return NULL_TREE;
7783 CASE_CONVERT:
7784 case FLOAT_EXPR:
7785 case FIX_TRUNC_EXPR:
7786 if (TREE_TYPE (op0) == type)
7787 return op0;
7789 if (COMPARISON_CLASS_P (op0))
7791 /* If we have (type) (a CMP b) and type is an integral type, return
7792 new expression involving the new type. Canonicalize
7793 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7794 non-integral type.
7795 Do not fold the result, as that would not simplify it further;
7796 folding again would only recurse. */
7797 if (TREE_CODE (type) == BOOLEAN_TYPE)
7798 return build2_loc (loc, TREE_CODE (op0), type,
7799 TREE_OPERAND (op0, 0),
7800 TREE_OPERAND (op0, 1));
7801 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7802 && TREE_CODE (type) != VECTOR_TYPE)
7803 return build3_loc (loc, COND_EXPR, type, op0,
7804 constant_boolean_node (true, type),
7805 constant_boolean_node (false, type));
7808 /* Handle cases of two conversions in a row. */
7809 if (CONVERT_EXPR_P (op0))
7811 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7812 tree inter_type = TREE_TYPE (op0);
7813 int inside_int = INTEGRAL_TYPE_P (inside_type);
7814 int inside_ptr = POINTER_TYPE_P (inside_type);
7815 int inside_float = FLOAT_TYPE_P (inside_type);
7816 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7817 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7818 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7819 int inter_int = INTEGRAL_TYPE_P (inter_type);
7820 int inter_ptr = POINTER_TYPE_P (inter_type);
7821 int inter_float = FLOAT_TYPE_P (inter_type);
7822 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7823 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7824 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7825 int final_int = INTEGRAL_TYPE_P (type);
7826 int final_ptr = POINTER_TYPE_P (type);
7827 int final_float = FLOAT_TYPE_P (type);
7828 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7829 unsigned int final_prec = TYPE_PRECISION (type);
7830 int final_unsignedp = TYPE_UNSIGNED (type);
7832 /* In addition to the cases of two conversions in a row
7833 handled below, if we are converting something to its own
7834 type via an object of identical or wider precision, neither
7835 conversion is needed. */
7836 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7837 && (((inter_int || inter_ptr) && final_int)
7838 || (inter_float && final_float))
7839 && inter_prec >= final_prec)
7840 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7842 /* Likewise, if the intermediate and initial types are either both
7843 float or both integer, we don't need the middle conversion if the
7844 former is wider than the latter and doesn't change the signedness
7845 (for integers). Avoid this if the final type is a pointer since
7846 then we sometimes need the middle conversion. Likewise if the
7847 final type has a precision not equal to the size of its mode. */
7848 if (((inter_int && inside_int)
7849 || (inter_float && inside_float)
7850 || (inter_vec && inside_vec))
7851 && inter_prec >= inside_prec
7852 && (inter_float || inter_vec
7853 || inter_unsignedp == inside_unsignedp)
7854 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7855 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7856 && ! final_ptr
7857 && (! final_vec || inter_prec == inside_prec))
7858 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7860 /* If we have a sign-extension of a zero-extended value, we can
7861 replace that by a single zero-extension. Likewise if the
7862 final conversion does not change precision we can drop the
7863 intermediate conversion. */
7864 if (inside_int && inter_int && final_int
7865 && ((inside_prec < inter_prec && inter_prec < final_prec
7866 && inside_unsignedp && !inter_unsignedp)
7867 || final_prec == inter_prec))
7868 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7870 /* Two conversions in a row are not needed unless:
7871 - some conversion is floating-point (overstrict for now), or
7872 - some conversion is a vector (overstrict for now), or
7873 - the intermediate type is narrower than both initial and
7874 final, or
7875 - the intermediate type and innermost type differ in signedness,
7876 and the outermost type is wider than the intermediate, or
7877 - the initial type is a pointer type and the precisions of the
7878 intermediate and final types differ, or
7879 - the final type is a pointer type and the precisions of the
7880 initial and intermediate types differ. */
7881 if (! inside_float && ! inter_float && ! final_float
7882 && ! inside_vec && ! inter_vec && ! final_vec
7883 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7884 && ! (inside_int && inter_int
7885 && inter_unsignedp != inside_unsignedp
7886 && inter_prec < final_prec)
7887 && ((inter_unsignedp && inter_prec > inside_prec)
7888 == (final_unsignedp && final_prec > inter_prec))
7889 && ! (inside_ptr && inter_prec != final_prec)
7890 && ! (final_ptr && inside_prec != inter_prec)
7891 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7892 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7893 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
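/* Worked examples for the three rules above (illustrative, not part
   of the original comments; assumes 8-bit char, 16-bit short, 32-bit
   int and 64-bit long):

     (int) (long) i   -> i        own type via a wider intermediate;
     (int) (short) c  -> (int) c  for unsigned char c: sign-extension
                                  of a zero-extended value;
     (int) (char) l   unchanged   the intermediate truncates, being
                                  narrower than both ends.  */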
7896 /* Handle (T *)&A.B.C for A being of type T and B and C
7897 living at offset zero. This occurs frequently in
7898 C++ upcasting and then accessing the base. */
7899 if (TREE_CODE (op0) == ADDR_EXPR
7900 && POINTER_TYPE_P (type)
7901 && handled_component_p (TREE_OPERAND (op0, 0)))
7903 HOST_WIDE_INT bitsize, bitpos;
7904 tree offset;
7905 enum machine_mode mode;
7906 int unsignedp, volatilep;
7907 tree base = TREE_OPERAND (op0, 0);
7908 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7909 &mode, &unsignedp, &volatilep, false);
7910 /* If the reference was to a (constant) zero offset, we can use
7911 the address of the base if it has the same base type
7912 as the result type and the pointer type is unqualified. */
7913 if (! offset && bitpos == 0
7914 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7915 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7916 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7917 return fold_convert_loc (loc, type,
7918 build_fold_addr_expr_loc (loc, base));
7921 if (TREE_CODE (op0) == MODIFY_EXPR
7922 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7923 /* Detect assigning a bitfield. */
7924 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7925 && DECL_BIT_FIELD
7926 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7928 /* Don't leave an assignment inside a conversion
7929 unless assigning a bitfield. */
7930 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7931 /* First do the assignment, then return converted constant. */
7932 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7933 TREE_NO_WARNING (tem) = 1;
7934 TREE_USED (tem) = 1;
7935 return tem;
7938 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7939 constants (if x has signed type, the sign bit cannot be set
7940 in c). This folds extension into the BIT_AND_EXPR.
7941 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7942 very likely don't have maximal range for their precision and this
7943 transformation effectively doesn't preserve non-maximal ranges. */
7944 if (TREE_CODE (type) == INTEGER_TYPE
7945 && TREE_CODE (op0) == BIT_AND_EXPR
7946 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7948 tree and_expr = op0;
7949 tree and0 = TREE_OPERAND (and_expr, 0);
7950 tree and1 = TREE_OPERAND (and_expr, 1);
7951 int change = 0;
7953 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7954 || (TYPE_PRECISION (type)
7955 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7956 change = 1;
7957 else if (TYPE_PRECISION (TREE_TYPE (and1))
7958 <= HOST_BITS_PER_WIDE_INT
7959 && host_integerp (and1, 1))
7961 unsigned HOST_WIDE_INT cst;
7963 cst = tree_low_cst (and1, 1);
7964 cst &= (HOST_WIDE_INT) -1
7965 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7966 change = (cst == 0);
7967 #ifdef LOAD_EXTEND_OP
7968 if (change
7969 && !flag_syntax_only
7970 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7971 == ZERO_EXTEND))
7973 tree uns = unsigned_type_for (TREE_TYPE (and0));
7974 and0 = fold_convert_loc (loc, uns, and0);
7975 and1 = fold_convert_loc (loc, uns, and1);
7977 #endif
7979 if (change)
7981 tem = force_fit_type_double (type, tree_to_double_int (and1),
7982 0, TREE_OVERFLOW (and1));
7983 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7984 fold_convert_loc (loc, type, and0), tem);
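/* Example (illustrative, not in the original): for unsigned char c,
   (unsigned int) (c & 0x0f) becomes (unsigned int) c & 15, so the
   zero-extension and the mask can be handled together later.  */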
7988 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7989 when one of the new casts will fold away. Conservatively we assume
7990 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7991 if (POINTER_TYPE_P (type)
7992 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7993 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7994 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7995 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7996 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7998 tree arg00 = TREE_OPERAND (arg0, 0);
7999 tree arg01 = TREE_OPERAND (arg0, 1);
8001 return fold_build_pointer_plus_loc
8002 (loc, fold_convert_loc (loc, type, arg00), arg01);
8005 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8006 of the same precision, and X has an integer type not narrower than
8007 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8008 if (INTEGRAL_TYPE_P (type)
8009 && TREE_CODE (op0) == BIT_NOT_EXPR
8010 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8011 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8012 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8014 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8015 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8016 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8017 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8018 fold_convert_loc (loc, type, tem));
8021 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8022 type of X and Y (integer types only). */
8023 if (INTEGRAL_TYPE_P (type)
8024 && TREE_CODE (op0) == MULT_EXPR
8025 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8026 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8028 /* Be careful not to introduce new overflows. */
8029 tree mult_type;
8030 if (TYPE_OVERFLOW_WRAPS (type))
8031 mult_type = type;
8032 else
8033 mult_type = unsigned_type_for (type);
8035 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8037 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8038 fold_convert_loc (loc, mult_type,
8039 TREE_OPERAND (op0, 0)),
8040 fold_convert_loc (loc, mult_type,
8041 TREE_OPERAND (op0, 1)));
8042 return fold_convert_loc (loc, type, tem);
8046 tem = fold_convert_const (code, type, op0);
8047 return tem ? tem : NULL_TREE;
8049 case ADDR_SPACE_CONVERT_EXPR:
8050 if (integer_zerop (arg0))
8051 return fold_convert_const (code, type, arg0);
8052 return NULL_TREE;
8054 case FIXED_CONVERT_EXPR:
8055 tem = fold_convert_const (code, type, arg0);
8056 return tem ? tem : NULL_TREE;
8058 case VIEW_CONVERT_EXPR:
8059 if (TREE_TYPE (op0) == type)
8060 return op0;
8061 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8062 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8063 type, TREE_OPERAND (op0, 0));
8064 if (TREE_CODE (op0) == MEM_REF)
8065 return fold_build2_loc (loc, MEM_REF, type,
8066 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8068 /* For integral conversions with the same precision or pointer
8069 conversions use a NOP_EXPR instead. */
8070 if ((INTEGRAL_TYPE_P (type)
8071 || POINTER_TYPE_P (type))
8072 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8073 || POINTER_TYPE_P (TREE_TYPE (op0)))
8074 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8075 return fold_convert_loc (loc, type, op0);
8077 /* Strip inner integral conversions that do not change the precision. */
8078 if (CONVERT_EXPR_P (op0)
8079 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8080 || POINTER_TYPE_P (TREE_TYPE (op0)))
8081 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8082 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8083 && (TYPE_PRECISION (TREE_TYPE (op0))
8084 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8085 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8086 type, TREE_OPERAND (op0, 0));
8088 return fold_view_convert_expr (type, op0);
8090 case NEGATE_EXPR:
8091 tem = fold_negate_expr (loc, arg0);
8092 if (tem)
8093 return fold_convert_loc (loc, type, tem);
8094 return NULL_TREE;
8096 case ABS_EXPR:
8097 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8098 return fold_abs_const (arg0, type);
8099 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8100 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8101 /* Convert fabs((double)float) into (double)fabsf(float). */
8102 else if (TREE_CODE (arg0) == NOP_EXPR
8103 && TREE_CODE (type) == REAL_TYPE)
8105 tree targ0 = strip_float_extensions (arg0);
8106 if (targ0 != arg0)
8107 return fold_convert_loc (loc, type,
8108 fold_build1_loc (loc, ABS_EXPR,
8109 TREE_TYPE (targ0),
8110 targ0));
8112 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8113 else if (TREE_CODE (arg0) == ABS_EXPR)
8114 return arg0;
8115 else if (tree_expr_nonnegative_p (arg0))
8116 return arg0;
8118 /* Strip sign ops from argument. */
8119 if (TREE_CODE (type) == REAL_TYPE)
8121 tem = fold_strip_sign_ops (arg0);
8122 if (tem)
8123 return fold_build1_loc (loc, ABS_EXPR, type,
8124 fold_convert_loc (loc, type, tem));
8126 return NULL_TREE;
8128 case CONJ_EXPR:
8129 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8130 return fold_convert_loc (loc, type, arg0);
8131 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8133 tree itype = TREE_TYPE (type);
8134 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8135 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8136 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8137 negate_expr (ipart));
8139 if (TREE_CODE (arg0) == COMPLEX_CST)
8141 tree itype = TREE_TYPE (type);
8142 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8143 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8144 return build_complex (type, rpart, negate_expr (ipart));
8146 if (TREE_CODE (arg0) == CONJ_EXPR)
8147 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8148 return NULL_TREE;
8150 case BIT_NOT_EXPR:
8151 if (TREE_CODE (arg0) == INTEGER_CST)
8152 return fold_not_const (arg0, type);
8153 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8154 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8155 /* Convert ~ (-A) to A - 1. */
8156 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8157 return fold_build2_loc (loc, MINUS_EXPR, type,
8158 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8159 build_int_cst (type, 1));
8160 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8161 else if (INTEGRAL_TYPE_P (type)
8162 && ((TREE_CODE (arg0) == MINUS_EXPR
8163 && integer_onep (TREE_OPERAND (arg0, 1)))
8164 || (TREE_CODE (arg0) == PLUS_EXPR
8165 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8166 return fold_build1_loc (loc, NEGATE_EXPR, type,
8167 fold_convert_loc (loc, type,
8168 TREE_OPERAND (arg0, 0)));
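/* (Illustrative note, not in the original: both identities above are
   the two's-complement fact ~A == -A - 1 read in each direction.)  */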
8169 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8170 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8171 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8172 fold_convert_loc (loc, type,
8173 TREE_OPERAND (arg0, 0)))))
8174 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8175 fold_convert_loc (loc, type,
8176 TREE_OPERAND (arg0, 1)));
8177 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8178 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8179 fold_convert_loc (loc, type,
8180 TREE_OPERAND (arg0, 1)))))
8181 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8182 fold_convert_loc (loc, type,
8183 TREE_OPERAND (arg0, 0)), tem);
8184 /* Perform BIT_NOT_EXPR on each element individually. */
8185 else if (TREE_CODE (arg0) == VECTOR_CST)
8187 tree *elements;
8188 tree elem;
8189 unsigned count = VECTOR_CST_NELTS (arg0), i;
8191 elements = XALLOCAVEC (tree, count);
8192 for (i = 0; i < count; i++)
8194 elem = VECTOR_CST_ELT (arg0, i);
8195 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8196 if (elem == NULL_TREE)
8197 break;
8198 elements[i] = elem;
8200 if (i == count)
8201 return build_vector (type, elements);
8204 return NULL_TREE;
8206 case TRUTH_NOT_EXPR:
8207 /* The argument to invert_truthvalue must have Boolean type. */
8208 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8209 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8211 /* Note that the operand of this must be an int
8212 and its values must be 0 or 1.
8213 ("true" is a fixed value perhaps depending on the language,
8214 but we don't handle values other than 1 correctly yet.) */
8215 tem = fold_truth_not_expr (loc, arg0);
8216 if (!tem)
8217 return NULL_TREE;
8218 return fold_convert_loc (loc, type, tem);
8220 case REALPART_EXPR:
8221 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8222 return fold_convert_loc (loc, type, arg0);
8223 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8224 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8225 TREE_OPERAND (arg0, 1));
8226 if (TREE_CODE (arg0) == COMPLEX_CST)
8227 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8228 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8230 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8231 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8232 fold_build1_loc (loc, REALPART_EXPR, itype,
8233 TREE_OPERAND (arg0, 0)),
8234 fold_build1_loc (loc, REALPART_EXPR, itype,
8235 TREE_OPERAND (arg0, 1)));
8236 return fold_convert_loc (loc, type, tem);
8238 if (TREE_CODE (arg0) == CONJ_EXPR)
8240 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8241 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8242 TREE_OPERAND (arg0, 0));
8243 return fold_convert_loc (loc, type, tem);
8245 if (TREE_CODE (arg0) == CALL_EXPR)
8247 tree fn = get_callee_fndecl (arg0);
8248 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8249 switch (DECL_FUNCTION_CODE (fn))
8251 CASE_FLT_FN (BUILT_IN_CEXPI):
8252 fn = mathfn_built_in (type, BUILT_IN_COS);
8253 if (fn)
8254 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8255 break;
8257 default:
8258 break;
8261 return NULL_TREE;
8263 case IMAGPART_EXPR:
8264 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8265 return build_zero_cst (type);
8266 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8267 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8268 TREE_OPERAND (arg0, 0));
8269 if (TREE_CODE (arg0) == COMPLEX_CST)
8270 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8271 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8273 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8274 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8275 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8276 TREE_OPERAND (arg0, 0)),
8277 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8278 TREE_OPERAND (arg0, 1)));
8279 return fold_convert_loc (loc, type, tem);
8281 if (TREE_CODE (arg0) == CONJ_EXPR)
8283 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8284 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8285 return fold_convert_loc (loc, type, negate_expr (tem));
8287 if (TREE_CODE (arg0) == CALL_EXPR)
8289 tree fn = get_callee_fndecl (arg0);
8290 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8291 switch (DECL_FUNCTION_CODE (fn))
8293 CASE_FLT_FN (BUILT_IN_CEXPI):
8294 fn = mathfn_built_in (type, BUILT_IN_SIN);
8295 if (fn)
8296 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8297 break;
8299 default:
8300 break;
8303 return NULL_TREE;
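/* Example (illustrative, not in the original): the two CEXPI cases
   above recover Euler's formula cexpi (x) == cos (x) + i*sin (x),
   so __real__ cexpi (x) folds to cos (x) and __imag__ cexpi (x) to
   sin (x).  */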
8305 case INDIRECT_REF:
8306 /* Fold *&X to X if X is an lvalue. */
8307 if (TREE_CODE (op0) == ADDR_EXPR)
8309 tree op00 = TREE_OPERAND (op0, 0);
8310 if ((TREE_CODE (op00) == VAR_DECL
8311 || TREE_CODE (op00) == PARM_DECL
8312 || TREE_CODE (op00) == RESULT_DECL)
8313 && !TREE_READONLY (op00))
8314 return op00;
8316 return NULL_TREE;
8318 case VEC_UNPACK_LO_EXPR:
8319 case VEC_UNPACK_HI_EXPR:
8320 case VEC_UNPACK_FLOAT_LO_EXPR:
8321 case VEC_UNPACK_FLOAT_HI_EXPR:
8323 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8324 tree *elts;
8325 enum tree_code subcode;
8327 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8328 if (TREE_CODE (arg0) != VECTOR_CST)
8329 return NULL_TREE;
8331 elts = XALLOCAVEC (tree, nelts * 2);
8332 if (!vec_cst_ctor_to_array (arg0, elts))
8333 return NULL_TREE;
8335 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8336 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8337 elts += nelts;
8339 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8340 subcode = NOP_EXPR;
8341 else
8342 subcode = FLOAT_EXPR;
8344 for (i = 0; i < nelts; i++)
8346 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8347 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8348 return NULL_TREE;
8351 return build_vector (type, elts);
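/* Example (illustrative, not in the original): on a little-endian
   target, VEC_UNPACK_LO_EXPR of the V4HI constant { 0, 1, 2, 3 }
   widens the low half to the V2SI constant { 0, 1 }; the
   BYTES_BIG_ENDIAN test above selects the other half of ELTS on
   big-endian targets.  */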
8354 case REDUC_MIN_EXPR:
8355 case REDUC_MAX_EXPR:
8356 case REDUC_PLUS_EXPR:
8358 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8359 tree *elts;
8360 enum tree_code subcode;
8362 if (TREE_CODE (op0) != VECTOR_CST)
8363 return NULL_TREE;
8365 elts = XALLOCAVEC (tree, nelts);
8366 if (!vec_cst_ctor_to_array (op0, elts))
8367 return NULL_TREE;
8369 switch (code)
8371 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8372 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8373 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8374 default: gcc_unreachable ();
8377 for (i = 1; i < nelts; i++)
8379 elts[0] = const_binop (subcode, elts[0], elts[i]);
8380 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8381 return NULL_TREE;
8382 elts[i] = build_zero_cst (TREE_TYPE (type));
8385 return build_vector (type, elts);
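/* Example (illustrative, not in the original): REDUC_PLUS_EXPR on
   the constant vector { 1, 2, 3, 4 } folds to { 10, 0, 0, 0 }: the
   loop above accumulates into element 0 and zeroes the remaining
   lanes.  */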
8388 default:
8389 return NULL_TREE;
8390 } /* switch (code) */
8394 /* If the operation was a conversion do _not_ mark a resulting constant
8395 with TREE_OVERFLOW if the original constant was not. These conversions
8396 have implementation defined behavior and retaining the TREE_OVERFLOW
8397 flag here would confuse later passes such as VRP. */
8398 tree
8399 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8400 tree type, tree op0)
8402 tree res = fold_unary_loc (loc, code, type, op0);
8403 if (res
8404 && TREE_CODE (res) == INTEGER_CST
8405 && TREE_CODE (op0) == INTEGER_CST
8406 && CONVERT_EXPR_CODE_P (code))
8407 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8409 return res;
8412 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8413 operands OP0 and OP1. LOC is the location of the resulting expression.
8414 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8415 Return the folded expression if folding is successful. Otherwise,
8416 return NULL_TREE. */
8417 static tree
8418 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8419 tree arg0, tree arg1, tree op0, tree op1)
8421 tree tem;
8423 /* We only do these simplifications if we are optimizing. */
8424 if (!optimize)
8425 return NULL_TREE;
8427 /* Check for things like (A || B) && (A || C). We can convert this
8428 to A || (B && C). Note that either operator can be any of the four
8429 truth and/or operations and the transformation will still be
8430 valid. Also note that we only care about order for the
8431 ANDIF and ORIF operators. If B contains side effects, this
8432 might change the truth-value of A. */
8433 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8434 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8435 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8436 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8437 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8438 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8440 tree a00 = TREE_OPERAND (arg0, 0);
8441 tree a01 = TREE_OPERAND (arg0, 1);
8442 tree a10 = TREE_OPERAND (arg1, 0);
8443 tree a11 = TREE_OPERAND (arg1, 1);
8444 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8445 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8446 && (code == TRUTH_AND_EXPR
8447 || code == TRUTH_OR_EXPR));
8449 if (operand_equal_p (a00, a10, 0))
8450 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8451 fold_build2_loc (loc, code, type, a01, a11));
8452 else if (commutative && operand_equal_p (a00, a11, 0))
8453 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8454 fold_build2_loc (loc, code, type, a01, a10));
8455 else if (commutative && operand_equal_p (a01, a10, 0))
8456 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8457 fold_build2_loc (loc, code, type, a00, a11));
8459 /* This case is tricky because we must either have commutative
8460 operators or else A10 must not have side effects. */
8462 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8463 && operand_equal_p (a01, a11, 0))
8464 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8465 fold_build2_loc (loc, code, type, a00, a10),
8466 a01);
8469 /* See if we can build a range comparison. */
8470 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8471 return tem;
8473 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8474 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8476 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8477 if (tem)
8478 return fold_build2_loc (loc, code, type, tem, arg1);
8481 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8482 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8484 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8485 if (tem)
8486 return fold_build2_loc (loc, code, type, arg0, tem);
8489 /* Check for the possibility of merging component references. If our
8490 lhs is another similar operation, try to merge its rhs with our
8491 rhs. Then try to merge our lhs and rhs. */
8492 if (TREE_CODE (arg0) == code
8493 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8494 TREE_OPERAND (arg0, 1), arg1)))
8495 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8497 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8498 return tem;
8500 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8501 && (code == TRUTH_AND_EXPR
8502 || code == TRUTH_ANDIF_EXPR
8503 || code == TRUTH_OR_EXPR
8504 || code == TRUTH_ORIF_EXPR))
8506 enum tree_code ncode, icode;
8508 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8509 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8510 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8512 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8513 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8514 We don't want to pack more than two leaves into a non-IF AND/OR
8515 expression.
8516 If the tree code of the left-hand operand isn't an AND/OR-IF code
8517 and isn't equal to IF-CODE, then we don't want to add the
8518 right-hand operand. If the inner right-hand side of the
8519 left-hand operand has side effects, or isn't simple, then we
8520 can't add to it, as otherwise we might destroy the if-sequence. */
8521 if (TREE_CODE (arg0) == icode
8522 && simple_operand_p_2 (arg1)
8523 /* Needed for sequence points to handle trappings, and
8524 side-effects. */
8525 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8527 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8528 arg1);
8529 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8530 tem);
8532 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8533 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8534 else if (TREE_CODE (arg1) == icode
8535 && simple_operand_p_2 (arg0)
8536 /* Needed for sequence points to handle trappings, and
8537 side-effects. */
8538 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8540 tem = fold_build2_loc (loc, ncode, type,
8541 arg0, TREE_OPERAND (arg1, 0));
8542 return fold_build2_loc (loc, icode, type, tem,
8543 TREE_OPERAND (arg1, 1));
8545 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8546 into (A OR B).
8547 For sequence point consistency, we need to check for trapping,
8548 and side-effects. */
8549 else if (code == icode && simple_operand_p_2 (arg0)
8550 && simple_operand_p_2 (arg1))
8551 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8554 return NULL_TREE;
8557 /* Fold a binary expression of code CODE and type TYPE with operands
8558 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8559 Return the folded expression if folding is successful. Otherwise,
8560 return NULL_TREE. */
8562 static tree
8563 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8565 enum tree_code compl_code;
8567 if (code == MIN_EXPR)
8568 compl_code = MAX_EXPR;
8569 else if (code == MAX_EXPR)
8570 compl_code = MIN_EXPR;
8571 else
8572 gcc_unreachable ();
8574 /* MIN (MAX (a, b), b) == b. */
8575 if (TREE_CODE (op0) == compl_code
8576 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8577 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8579 /* MIN (MAX (b, a), b) == b. */
8580 if (TREE_CODE (op0) == compl_code
8581 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8582 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8583 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8585 /* MIN (a, MAX (a, b)) == a. */
8586 if (TREE_CODE (op1) == compl_code
8587 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8588 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8589 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8591 /* MIN (a, MAX (b, a)) == a. */
8592 if (TREE_CODE (op1) == compl_code
8593 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8594 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8595 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8597 return NULL_TREE;
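/* Example (illustrative, not in the original): in MIN (MAX (a, b), b)
   the inner MAX is at least b, so the whole expression is exactly b;
   omit_one_operand_loc keeps any side effects of the dropped
   operand a.  */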
8600 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8601 by changing CODE to reduce the magnitude of constants involved in
8602 ARG0 of the comparison.
8603 Returns a canonicalized comparison tree if a simplification was
8604 possible, otherwise returns NULL_TREE.
8605 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8606 valid if signed overflow is undefined. */
8608 static tree
8609 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8610 tree arg0, tree arg1,
8611 bool *strict_overflow_p)
8613 enum tree_code code0 = TREE_CODE (arg0);
8614 tree t, cst0 = NULL_TREE;
8615 int sgn0;
8616 bool swap = false;
8618 /* Match A +- CST code arg1 and CST code arg1. We can change the
8619 first form only if overflow is undefined. */
8620 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8621 /* In principle pointers also have undefined overflow behavior,
8622 but that causes problems elsewhere. */
8623 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8624 && (code0 == MINUS_EXPR
8625 || code0 == PLUS_EXPR)
8626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8627 || code0 == INTEGER_CST))
8628 return NULL_TREE;
8630 /* Identify the constant in arg0 and its sign. */
8631 if (code0 == INTEGER_CST)
8632 cst0 = arg0;
8633 else
8634 cst0 = TREE_OPERAND (arg0, 1);
8635 sgn0 = tree_int_cst_sgn (cst0);
8637 /* Overflowed constants and zero will cause problems. */
8638 if (integer_zerop (cst0)
8639 || TREE_OVERFLOW (cst0))
8640 return NULL_TREE;
8642 /* See if we can reduce the magnitude of the constant in
8643 arg0 by changing the comparison code. */
8644 if (code0 == INTEGER_CST)
8646 /* CST <= arg1 -> CST-1 < arg1. */
8647 if (code == LE_EXPR && sgn0 == 1)
8648 code = LT_EXPR;
8649 /* -CST < arg1 -> -CST-1 <= arg1. */
8650 else if (code == LT_EXPR && sgn0 == -1)
8651 code = LE_EXPR;
8652 /* CST > arg1 -> CST-1 >= arg1. */
8653 else if (code == GT_EXPR && sgn0 == 1)
8654 code = GE_EXPR;
8655 /* -CST >= arg1 -> -CST-1 > arg1. */
8656 else if (code == GE_EXPR && sgn0 == -1)
8657 code = GT_EXPR;
8658 else
8659 return NULL_TREE;
8660 /* arg1 code' CST' might be more canonical. */
8661 swap = true;
8663 else
8665 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8666 if (code == LT_EXPR
8667 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8668 code = LE_EXPR;
8669 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8670 else if (code == GT_EXPR
8671 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8672 code = GE_EXPR;
8673 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8674 else if (code == LE_EXPR
8675 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8676 code = LT_EXPR;
8677 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8678 else if (code == GE_EXPR
8679 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8680 code = GT_EXPR;
8681 else
8682 return NULL_TREE;
8683 *strict_overflow_p = true;
8686 /* Now build the constant reduced in magnitude. But not if that
8687 would produce one outside of its type's range. */
8688 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8689 && ((sgn0 == 1
8690 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8691 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8692 || (sgn0 == -1
8693 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8694 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8695 /* We cannot swap the comparison here as that would cause us to
8696 endlessly recurse. */
8697 return NULL_TREE;
8699 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8700 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8701 if (code0 != INTEGER_CST)
8702 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8703 t = fold_convert (TREE_TYPE (arg1), t);
8705 /* If swapping might yield a more canonical form, do so. */
8706 if (swap)
8707 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8708 else
8709 return fold_build2_loc (loc, code, type, t, arg1);
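/* Examples (illustrative, not in the original): X + 2 > Y becomes
   X + 1 >= Y, which is valid only when signed overflow is undefined,
   hence *STRICT_OVERFLOW_P; and 3 <= Y becomes 2 < Y, then Y > 2
   after the swap.  */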
8712 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8713 overflow further. Try to decrease the magnitude of constants involved
8714 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa,
8715 and by putting sole constants at the second argument position.
8716 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8718 static tree
8719 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8720 tree arg0, tree arg1)
8722 tree t;
8723 bool strict_overflow_p;
8724 const char * const warnmsg = G_("assuming signed overflow does not occur "
8725 "when reducing constant in comparison");
8727 /* Try canonicalization by simplifying arg0. */
8728 strict_overflow_p = false;
8729 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8730 &strict_overflow_p);
8731 if (t)
8733 if (strict_overflow_p)
8734 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8735 return t;
8738 /* Try canonicalization by simplifying arg1 using the swapped
8739 comparison. */
8740 code = swap_tree_comparison (code);
8741 strict_overflow_p = false;
8742 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8743 &strict_overflow_p);
8744 if (t && strict_overflow_p)
8745 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8746 return t;
8749 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8750 space. This is used to avoid issuing overflow warnings for
8751 expressions like &p->x, which cannot wrap. */
8753 static bool
8754 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8756 double_int di_offset, total;
8758 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8759 return true;
8761 if (bitpos < 0)
8762 return true;
8764 if (offset == NULL_TREE)
8765 di_offset = double_int_zero;
8766 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8767 return true;
8768 else
8769 di_offset = TREE_INT_CST (offset);
8771 bool overflow;
8772 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8773 total = di_offset.add_with_sign (units, true, &overflow);
8774 if (overflow)
8775 return true;
8777 if (total.high != 0)
8778 return true;
8780 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8781 if (size <= 0)
8782 return true;
8784 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8785 array. */
8786 if (TREE_CODE (base) == ADDR_EXPR)
8788 HOST_WIDE_INT base_size;
8790 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8791 if (base_size > 0 && size < base_size)
8792 size = base_size;
8795 return total.low > (unsigned HOST_WIDE_INT) size;
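/* Example (illustrative, not in the original): for &p->x with p
   pointing to a 16-byte struct and x at byte offset 8, total.low is
   8, which does not exceed the size of 16, so the address cannot
   wrap and no warning is needed.  */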
8798 /* Subroutine of fold_binary. This routine performs all of the
8799 transformations that are common to the equality/inequality
8800 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8801 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8802 fold_binary should call fold_binary. Fold a comparison with
8803 tree code CODE and type TYPE with operands OP0 and OP1. Return
8804 the folded comparison or NULL_TREE. */
8806 static tree
8807 fold_comparison (location_t loc, enum tree_code code, tree type,
8808 tree op0, tree op1)
8810 tree arg0, arg1, tem;
8812 arg0 = op0;
8813 arg1 = op1;
8815 STRIP_SIGN_NOPS (arg0);
8816 STRIP_SIGN_NOPS (arg1);
8818 tem = fold_relational_const (code, type, arg0, arg1);
8819 if (tem != NULL_TREE)
8820 return tem;
8822 /* If one arg is a real or integer constant, put it last. */
8823 if (tree_swap_operands_p (arg0, arg1, true))
8824 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8826 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8827 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8828 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8829 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8830 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8831 && (TREE_CODE (arg1) == INTEGER_CST
8832 && !TREE_OVERFLOW (arg1)))
8834 tree const1 = TREE_OPERAND (arg0, 1);
8835 tree const2 = arg1;
8836 tree variable = TREE_OPERAND (arg0, 0);
8837 tree lhs;
8838 int lhs_add;
8839 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8841 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8842 TREE_TYPE (arg1), const2, const1);
8844 /* If the constant operation overflowed this can be
8845 simplified as a comparison against INT_MAX/INT_MIN. */
8846 if (TREE_CODE (lhs) == INTEGER_CST
8847 && TREE_OVERFLOW (lhs))
8849 int const1_sgn = tree_int_cst_sgn (const1);
8850 enum tree_code code2 = code;
8852 /* Get the sign of the constant on the lhs if the
8853 operation were VARIABLE + CONST1. */
8854 if (TREE_CODE (arg0) == MINUS_EXPR)
8855 const1_sgn = -const1_sgn;
8857 /* The sign of the constant determines if we overflowed
8858 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8859 Canonicalize to the INT_MIN overflow by swapping the comparison
8860 if necessary. */
8861 if (const1_sgn == -1)
8862 code2 = swap_tree_comparison (code);
8864 /* We now can look at the canonicalized case
8865 VARIABLE + 1 CODE2 INT_MIN
8866 and decide on the result. */
8867 if (code2 == LT_EXPR
8868 || code2 == LE_EXPR
8869 || code2 == EQ_EXPR)
8870 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8871 else if (code2 == NE_EXPR
8872 || code2 == GE_EXPR
8873 || code2 == GT_EXPR)
8874 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8877 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8878 && (TREE_CODE (lhs) != INTEGER_CST
8879 || !TREE_OVERFLOW (lhs)))
8881 if (code != EQ_EXPR && code != NE_EXPR)
8882 fold_overflow_warning ("assuming signed overflow does not occur "
8883 "when changing X +- C1 cmp C2 to "
8884 "X cmp C1 +- C2",
8885 WARN_STRICT_OVERFLOW_COMPARISON);
8886 return fold_build2_loc (loc, code, type, variable, lhs);
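/* Examples (illustrative, not in the original): X + 10 < 20 becomes
   X < 10; for X - 1 > INT_MAX the folded constant overflows and the
   comparison is decided outright as false above.  */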
8890 /* For comparisons of pointers we can decompose it to a compile time
8891 comparison of the base objects and the offsets into the object.
8892 This requires at least one operand being an ADDR_EXPR or a
8893 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8894 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8895 && (TREE_CODE (arg0) == ADDR_EXPR
8896 || TREE_CODE (arg1) == ADDR_EXPR
8897 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8898 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8900 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8901 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8902 enum machine_mode mode;
8903 int volatilep, unsignedp;
8904 bool indirect_base0 = false, indirect_base1 = false;
8906 /* Get base and offset for the access. Strip ADDR_EXPR for
8907 get_inner_reference, but put it back by stripping INDIRECT_REF
8908 off the base object if possible. indirect_baseN will be true
8909 if baseN is not an address but refers to the object itself. */
8910 base0 = arg0;
8911 if (TREE_CODE (arg0) == ADDR_EXPR)
8913 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8914 &bitsize, &bitpos0, &offset0, &mode,
8915 &unsignedp, &volatilep, false);
8916 if (TREE_CODE (base0) == INDIRECT_REF)
8917 base0 = TREE_OPERAND (base0, 0);
8918 else
8919 indirect_base0 = true;
8921 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8923 base0 = TREE_OPERAND (arg0, 0);
8924 STRIP_SIGN_NOPS (base0);
8925 if (TREE_CODE (base0) == ADDR_EXPR)
8927 base0 = TREE_OPERAND (base0, 0);
8928 indirect_base0 = true;
8930 offset0 = TREE_OPERAND (arg0, 1);
8931 if (host_integerp (offset0, 0))
8933 HOST_WIDE_INT off = size_low_cst (offset0);
8934 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8935 * BITS_PER_UNIT)
8936 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8938 bitpos0 = off * BITS_PER_UNIT;
8939 offset0 = NULL_TREE;
8944 base1 = arg1;
8945 if (TREE_CODE (arg1) == ADDR_EXPR)
8947 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8948 &bitsize, &bitpos1, &offset1, &mode,
8949 &unsignedp, &volatilep, false);
8950 if (TREE_CODE (base1) == INDIRECT_REF)
8951 base1 = TREE_OPERAND (base1, 0);
8952 else
8953 indirect_base1 = true;
8955 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8957 base1 = TREE_OPERAND (arg1, 0);
8958 STRIP_SIGN_NOPS (base1);
8959 if (TREE_CODE (base1) == ADDR_EXPR)
8961 base1 = TREE_OPERAND (base1, 0);
8962 indirect_base1 = true;
8964 offset1 = TREE_OPERAND (arg1, 1);
8965 if (host_integerp (offset1, 0))
8967 HOST_WIDE_INT off = size_low_cst (offset1);
8968 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8969 * BITS_PER_UNIT)
8970 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8972 bitpos1 = off * BITS_PER_UNIT;
8973 offset1 = NULL_TREE;
8978 /* A local variable can never be pointed to by
8979 the default SSA name of an incoming parameter. */
8980 if ((TREE_CODE (arg0) == ADDR_EXPR
8981 && indirect_base0
8982 && TREE_CODE (base0) == VAR_DECL
8983 && auto_var_in_fn_p (base0, current_function_decl)
8984 && !indirect_base1
8985 && TREE_CODE (base1) == SSA_NAME
8986 && SSA_NAME_IS_DEFAULT_DEF (base1)
8987 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8988 || (TREE_CODE (arg1) == ADDR_EXPR
8989 && indirect_base1
8990 && TREE_CODE (base1) == VAR_DECL
8991 && auto_var_in_fn_p (base1, current_function_decl)
8992 && !indirect_base0
8993 && TREE_CODE (base0) == SSA_NAME
8994 && SSA_NAME_IS_DEFAULT_DEF (base0)
8995 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8997 if (code == NE_EXPR)
8998 return constant_boolean_node (1, type);
8999 else if (code == EQ_EXPR)
9000 return constant_boolean_node (0, type);
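/* Illustrative C source for this case (hypothetical example):
     int f (int *p) { int local; return p == &local; }
   On entry the default definition of P cannot point to LOCAL, so
   the equality folds to 0 and the corresponding inequality to 1.  */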
9002 /* If we have equivalent bases we might be able to simplify. */
9003 else if (indirect_base0 == indirect_base1
9004 && operand_equal_p (base0, base1, 0))
9006 /* We can fold this expression to a constant if the non-constant
9007 offset parts are equal. */
9008 if ((offset0 == offset1
9009 || (offset0 && offset1
9010 && operand_equal_p (offset0, offset1, 0)))
9011 && (code == EQ_EXPR
9012 || code == NE_EXPR
9013 || (indirect_base0 && DECL_P (base0))
9014 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9017 if (code != EQ_EXPR
9018 && code != NE_EXPR
9019 && bitpos0 != bitpos1
9020 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9021 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9022 fold_overflow_warning (("assuming pointer wraparound does not "
9023 "occur when comparing P +- C1 with "
9024 "P +- C2"),
9025 WARN_STRICT_OVERFLOW_CONDITIONAL);
9027 switch (code)
9029 case EQ_EXPR:
9030 return constant_boolean_node (bitpos0 == bitpos1, type);
9031 case NE_EXPR:
9032 return constant_boolean_node (bitpos0 != bitpos1, type);
9033 case LT_EXPR:
9034 return constant_boolean_node (bitpos0 < bitpos1, type);
9035 case LE_EXPR:
9036 return constant_boolean_node (bitpos0 <= bitpos1, type);
9037 case GE_EXPR:
9038 return constant_boolean_node (bitpos0 >= bitpos1, type);
9039 case GT_EXPR:
9040 return constant_boolean_node (bitpos0 > bitpos1, type);
9041 default:;
9044 /* We can simplify the comparison to a comparison of the variable
9045 offset parts if the constant offset parts are equal.
9046 Be careful to use signed sizetype here because otherwise we
9047 mess with array offsets in the wrong way. This is possible
9048 because pointer arithmetic is restricted to remain within an
9049 object and overflow on pointer differences is undefined as of
9050 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9051 else if (bitpos0 == bitpos1
9052 && ((code == EQ_EXPR || code == NE_EXPR)
9053 || (indirect_base0 && DECL_P (base0))
9054 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9056 /* By converting to signed sizetype we cover middle-end pointer
9057 arithmetic which operates on unsigned pointer types of size
9058 type size and ARRAY_REF offsets which are properly sign or
9059 zero extended from their type in case it is narrower than
9060 sizetype. */
9061 if (offset0 == NULL_TREE)
9062 offset0 = build_int_cst (ssizetype, 0);
9063 else
9064 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9065 if (offset1 == NULL_TREE)
9066 offset1 = build_int_cst (ssizetype, 0);
9067 else
9068 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9070 if (code != EQ_EXPR
9071 && code != NE_EXPR
9072 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9073 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9074 fold_overflow_warning (("assuming pointer wraparound does not "
9075 "occur when comparing P +- C1 with "
9076 "P +- C2"),
9077 WARN_STRICT_OVERFLOW_COMPARISON);
9079 return fold_build2_loc (loc, code, type, offset0, offset1);
9082 /* For non-equal bases we can simplify if they are addresses
9083 of local binding decls or constants. */
9084 else if (indirect_base0 && indirect_base1
9085 /* We know that !operand_equal_p (base0, base1, 0)
9086 because the if condition was false. But make
9087 sure two decls are not the same. */
9088 && base0 != base1
9089 && TREE_CODE (arg0) == ADDR_EXPR
9090 && TREE_CODE (arg1) == ADDR_EXPR
9091 && (((TREE_CODE (base0) == VAR_DECL
9092 || TREE_CODE (base0) == PARM_DECL)
9093 && (targetm.binds_local_p (base0)
9094 || CONSTANT_CLASS_P (base1)))
9095 || CONSTANT_CLASS_P (base0))
9096 && (((TREE_CODE (base1) == VAR_DECL
9097 || TREE_CODE (base1) == PARM_DECL)
9098 && (targetm.binds_local_p (base1)
9099 || CONSTANT_CLASS_P (base0)))
9100 || CONSTANT_CLASS_P (base1)))
9102 if (code == EQ_EXPR)
9103 return omit_two_operands_loc (loc, type, boolean_false_node,
9104 arg0, arg1);
9105 else if (code == NE_EXPR)
9106 return omit_two_operands_loc (loc, type, boolean_true_node,
9107 arg0, arg1);
9109 /* For equal offsets we can simplify to a comparison of the
9110 base addresses. */
9111 else if (bitpos0 == bitpos1
9112 && (indirect_base0
9113 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9114 && (indirect_base1
9115 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9116 && ((offset0 == offset1)
9117 || (offset0 && offset1
9118 && operand_equal_p (offset0, offset1, 0))))
9120 if (indirect_base0)
9121 base0 = build_fold_addr_expr_loc (loc, base0);
9122 if (indirect_base1)
9123 base1 = build_fold_addr_expr_loc (loc, base1);
9124 return fold_build2_loc (loc, code, type, base0, base1);
9128 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9129 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9130 the resulting offset is smaller in absolute value than the
9131 original one. */
9132 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9133 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9134 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9135 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9136 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9137 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9140 tree const1 = TREE_OPERAND (arg0, 1);
9141 tree const2 = TREE_OPERAND (arg1, 1);
9142 tree variable1 = TREE_OPERAND (arg0, 0);
9143 tree variable2 = TREE_OPERAND (arg1, 0);
9144 tree cst;
9145 const char * const warnmsg = G_("assuming signed overflow does not "
9146 "occur when combining constants around "
9147 "a comparison");
9149 /* Put the constant on the side where it doesn't overflow and is
9150 of lower absolute value than before. */
9151 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9152 ? MINUS_EXPR : PLUS_EXPR,
9153 const2, const1);
9154 if (!TREE_OVERFLOW (cst)
9155 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9157 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9158 return fold_build2_loc (loc, code, type,
9159 variable1,
9160 fold_build2_loc (loc,
9161 TREE_CODE (arg1), TREE_TYPE (arg1),
9162 variable2, cst));
9165 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9166 ? MINUS_EXPR : PLUS_EXPR,
9167 const1, const2);
9168 if (!TREE_OVERFLOW (cst)
9169 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9171 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9172 return fold_build2_loc (loc, code, type,
9173 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9174 variable1, cst),
9175 variable2);
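/* Worked example (editorial): for "X + 3 < Y + 7" the first attempt
   gives CST = 7 - 3 = 4, which does not overflow and is smaller in
   absolute value than 7 with the same sign, so the comparison folds
   to "X < Y + 4".  */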
9179 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9180 signed arithmetic case. That form is created by the compiler
9181 often enough for folding it to be of value. One example is in
9182 computing loop trip counts after Operator Strength Reduction. */
9183 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9184 && TREE_CODE (arg0) == MULT_EXPR
9185 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9186 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9187 && integer_zerop (arg1))
9189 tree const1 = TREE_OPERAND (arg0, 1);
9190 tree const2 = arg1; /* zero */
9191 tree variable1 = TREE_OPERAND (arg0, 0);
9192 enum tree_code cmp_code = code;
9194 /* Handle unfolded multiplication by zero. */
9195 if (integer_zerop (const1))
9196 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9198 fold_overflow_warning (("assuming signed overflow does not occur when "
9199 "eliminating multiplication in comparison "
9200 "with zero"),
9201 WARN_STRICT_OVERFLOW_COMPARISON);
9203 /* If const1 is negative we swap the sense of the comparison. */
9204 if (tree_int_cst_sgn (const1) < 0)
9205 cmp_code = swap_tree_comparison (cmp_code);
9207 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
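/* Worked examples (editorial): with undefined signed overflow,
   "X * 4 > 0" folds to "X > 0", while "X * -2 > 0" folds to "X < 0"
   because the negative multiplier swaps the comparison.  */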
9210 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9211 if (tem)
9212 return tem;
9214 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9216 tree targ0 = strip_float_extensions (arg0);
9217 tree targ1 = strip_float_extensions (arg1);
9218 tree newtype = TREE_TYPE (targ0);
9220 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9221 newtype = TREE_TYPE (targ1);
9223 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9224 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9225 return fold_build2_loc (loc, code, type,
9226 fold_convert_loc (loc, newtype, targ0),
9227 fold_convert_loc (loc, newtype, targ1));
9229 /* (-a) CMP (-b) -> b CMP a */
9230 if (TREE_CODE (arg0) == NEGATE_EXPR
9231 && TREE_CODE (arg1) == NEGATE_EXPR)
9232 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9233 TREE_OPERAND (arg0, 0));
9235 if (TREE_CODE (arg1) == REAL_CST)
9237 REAL_VALUE_TYPE cst;
9238 cst = TREE_REAL_CST (arg1);
9240 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9241 if (TREE_CODE (arg0) == NEGATE_EXPR)
9242 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9243 TREE_OPERAND (arg0, 0),
9244 build_real (TREE_TYPE (arg1),
9245 real_value_negate (&cst)));
9247 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9248 /* a CMP (-0) -> a CMP 0 */
9249 if (REAL_VALUE_MINUS_ZERO (cst))
9250 return fold_build2_loc (loc, code, type, arg0,
9251 build_real (TREE_TYPE (arg1), dconst0));
9253 /* x != NaN is always true, other ops are always false. */
9254 if (REAL_VALUE_ISNAN (cst)
9255 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9257 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9258 return omit_one_operand_loc (loc, type, tem, arg0);
9261 /* Fold comparisons against infinity. */
9262 if (REAL_VALUE_ISINF (cst)
9263 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9265 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9266 if (tem != NULL_TREE)
9267 return tem;
9271 /* If this is a comparison of a real constant with a PLUS_EXPR
9272 or a MINUS_EXPR of a real constant, we can convert it into a
9273 comparison with a revised real constant, provided that no overflow
9274 occurs and unsafe_math_optimizations are enabled. */
9275 if (flag_unsafe_math_optimizations
9276 && TREE_CODE (arg1) == REAL_CST
9277 && (TREE_CODE (arg0) == PLUS_EXPR
9278 || TREE_CODE (arg0) == MINUS_EXPR)
9279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9280 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9281 ? MINUS_EXPR : PLUS_EXPR,
9282 arg1, TREE_OPERAND (arg0, 1)))
9283 && !TREE_OVERFLOW (tem))
9284 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9286 /* Likewise, we can simplify a comparison of a real constant with
9287 a MINUS_EXPR whose first operand is also a real constant, i.e.
9288 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9289 floating-point types only if -fassociative-math is set. */
9290 if (flag_associative_math
9291 && TREE_CODE (arg1) == REAL_CST
9292 && TREE_CODE (arg0) == MINUS_EXPR
9293 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9294 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9295 arg1))
9296 && !TREE_OVERFLOW (tem))
9297 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9298 TREE_OPERAND (arg0, 1), tem);
9300 /* Fold comparisons against built-in math functions. */
9301 if (TREE_CODE (arg1) == REAL_CST
9302 && flag_unsafe_math_optimizations
9303 && ! flag_errno_math)
9305 enum built_in_function fcode = builtin_mathfn_code (arg0);
9307 if (fcode != END_BUILTINS)
9309 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9310 if (tem != NULL_TREE)
9311 return tem;
9316 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9317 && CONVERT_EXPR_P (arg0))
9319 /* If we are widening one operand of an integer comparison,
9320 see if the other operand is similarly being widened. Perhaps we
9321 can do the comparison in the narrower type. */
9322 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9323 if (tem)
9324 return tem;
9326 /* Or if we are changing signedness. */
9327 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9328 if (tem)
9329 return tem;
9332 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9333 constant, we can simplify it. */
9334 if (TREE_CODE (arg1) == INTEGER_CST
9335 && (TREE_CODE (arg0) == MIN_EXPR
9336 || TREE_CODE (arg0) == MAX_EXPR)
9337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9339 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9340 if (tem)
9341 return tem;
9344 /* Simplify comparison of something with itself. (For IEEE
9345 floating-point, we can only do some of these simplifications.) */
9346 if (operand_equal_p (arg0, arg1, 0))
9348 switch (code)
9350 case EQ_EXPR:
9351 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9352 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9353 return constant_boolean_node (1, type);
9354 break;
9356 case GE_EXPR:
9357 case LE_EXPR:
9358 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9359 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9360 return constant_boolean_node (1, type);
9361 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9363 case NE_EXPR:
9364 /* For NE, we can only do this simplification if integer
9365 or we don't honor IEEE floating point NaNs. */
9366 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9367 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9368 break;
9369 /* ... fall through ... */
9370 case GT_EXPR:
9371 case LT_EXPR:
9372 return constant_boolean_node (0, type);
9373 default:
9374 gcc_unreachable ();
9378 /* If we are comparing an expression that just has comparisons
9379 of two integer values, arithmetic expressions of those comparisons,
9380 and constants, we can simplify it. There are only three cases
9381 to check: the two values can either be equal, the first can be
9382 greater, or the second can be greater. Fold the expression for
9383 those three values. Since each value must be 0 or 1, we have
9384 eight possibilities, each of which corresponds to the constant 0
9385 or 1 or one of the six possible comparisons.
9387 This handles common cases like (a > b) == 0 but also handles
9388 expressions like ((x > y) - (y > x)) > 0, which supposedly
9389 occur in macroized code. */
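/* Worked example (editorial): for "((a > b) - (b > a)) > 0" we get
   CVAL1 = a and CVAL2 = b; substituting (max,min), (max,max) and
   (min,max) yields high_result = 1, equal_result = 0 and
   low_result = 0, i.e. mask 4 below, so the expression folds to
   "a > b".  */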
9391 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9393 tree cval1 = 0, cval2 = 0;
9394 int save_p = 0;
9396 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9397 /* Don't handle degenerate cases here; they should already
9398 have been handled anyway. */
9399 && cval1 != 0 && cval2 != 0
9400 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9401 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9402 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9403 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9404 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9405 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9406 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9408 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9409 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9411 /* We can't just pass T to eval_subst in case cval1 or cval2
9412 was the same as ARG1. */
9414 tree high_result
9415 = fold_build2_loc (loc, code, type,
9416 eval_subst (loc, arg0, cval1, maxval,
9417 cval2, minval),
9418 arg1);
9419 tree equal_result
9420 = fold_build2_loc (loc, code, type,
9421 eval_subst (loc, arg0, cval1, maxval,
9422 cval2, maxval),
9423 arg1);
9424 tree low_result
9425 = fold_build2_loc (loc, code, type,
9426 eval_subst (loc, arg0, cval1, minval,
9427 cval2, maxval),
9428 arg1);
9430 /* All three of these results should be 0 or 1. Confirm they are.
9431 Then use those values to select the proper code to use. */
9433 if (TREE_CODE (high_result) == INTEGER_CST
9434 && TREE_CODE (equal_result) == INTEGER_CST
9435 && TREE_CODE (low_result) == INTEGER_CST)
9437 /* Make a 3-bit mask with the high-order bit being the
9438 value for `>', the next for `=', and the low for `<'. */
9439 switch ((integer_onep (high_result) * 4)
9440 + (integer_onep (equal_result) * 2)
9441 + integer_onep (low_result))
9443 case 0:
9444 /* Always false. */
9445 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9446 case 1:
9447 code = LT_EXPR;
9448 break;
9449 case 2:
9450 code = EQ_EXPR;
9451 break;
9452 case 3:
9453 code = LE_EXPR;
9454 break;
9455 case 4:
9456 code = GT_EXPR;
9457 break;
9458 case 5:
9459 code = NE_EXPR;
9460 break;
9461 case 6:
9462 code = GE_EXPR;
9463 break;
9464 case 7:
9465 /* Always true. */
9466 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9469 if (save_p)
9471 tem = save_expr (build2 (code, type, cval1, cval2));
9472 SET_EXPR_LOCATION (tem, loc);
9473 return tem;
9475 return fold_build2_loc (loc, code, type, cval1, cval2);
9480 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9481 into a single range test. */
9482 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9483 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9484 && TREE_CODE (arg1) == INTEGER_CST
9485 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9486 && !integer_zerop (TREE_OPERAND (arg0, 1))
9487 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9488 && !TREE_OVERFLOW (arg1))
9490 tem = fold_div_compare (loc, code, type, arg0, arg1);
9491 if (tem != NULL_TREE)
9492 return tem;
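/* Worked example (editorial): for unsigned X, "X / 4 == 2" holds
   exactly when 8 <= X && X <= 11, which fold_div_compare expresses
   as a single range test.  */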
9495 /* Fold ~X op ~Y as Y op X. */
9496 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9497 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9499 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9500 return fold_build2_loc (loc, code, type,
9501 fold_convert_loc (loc, cmp_type,
9502 TREE_OPERAND (arg1, 0)),
9503 TREE_OPERAND (arg0, 0));
9506 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9507 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9508 && TREE_CODE (arg1) == INTEGER_CST)
9510 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9511 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9512 TREE_OPERAND (arg0, 0),
9513 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9514 fold_convert_loc (loc, cmp_type, arg1)));
9517 return NULL_TREE;
9521 /* Subroutine of fold_binary. Optimize complex multiplications of the
9522 form z * conj(z), folding them to pow(realpart(z),2) + pow(imagpart(z),2). The
9523 argument EXPR represents the expression "z" of type TYPE. */
9525 static tree
9526 fold_mult_zconjz (location_t loc, tree type, tree expr)
9528 tree itype = TREE_TYPE (type);
9529 tree rpart, ipart, tem;
9531 if (TREE_CODE (expr) == COMPLEX_EXPR)
9533 rpart = TREE_OPERAND (expr, 0);
9534 ipart = TREE_OPERAND (expr, 1);
9536 else if (TREE_CODE (expr) == COMPLEX_CST)
9538 rpart = TREE_REALPART (expr);
9539 ipart = TREE_IMAGPART (expr);
9541 else
9543 expr = save_expr (expr);
9544 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9545 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9548 rpart = save_expr (rpart);
9549 ipart = save_expr (ipart);
9550 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9551 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9552 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9553 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9554 build_zero_cst (itype));
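/* Example (editorial): for z = a + b*i we have
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR built above.  */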
9558 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9559 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9560 guarantees that P and N have the same least significant log2(M) bits.
9561 N is not otherwise constrained. In particular, N is not normalized to
9562 0 <= N < M as is common. In general, the precise value of P is unknown.
9563 M is chosen as large as possible such that constant N can be determined.
9565 Returns M and sets *RESIDUE to N.
9567 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9568 account. This is not always possible due to PR 35705.
9571 static unsigned HOST_WIDE_INT
9572 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9573 bool allow_func_align)
9575 enum tree_code code;
9577 *residue = 0;
9579 code = TREE_CODE (expr);
9580 if (code == ADDR_EXPR)
9582 unsigned int bitalign;
9583 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9584 *residue /= BITS_PER_UNIT;
9585 return bitalign / BITS_PER_UNIT;
9587 else if (code == POINTER_PLUS_EXPR)
9589 tree op0, op1;
9590 unsigned HOST_WIDE_INT modulus;
9591 enum tree_code inner_code;
9593 op0 = TREE_OPERAND (expr, 0);
9594 STRIP_NOPS (op0);
9595 modulus = get_pointer_modulus_and_residue (op0, residue,
9596 allow_func_align);
9598 op1 = TREE_OPERAND (expr, 1);
9599 STRIP_NOPS (op1);
9600 inner_code = TREE_CODE (op1);
9601 if (inner_code == INTEGER_CST)
9603 *residue += TREE_INT_CST_LOW (op1);
9604 return modulus;
9606 else if (inner_code == MULT_EXPR)
9608 op1 = TREE_OPERAND (op1, 1);
9609 if (TREE_CODE (op1) == INTEGER_CST)
9611 unsigned HOST_WIDE_INT align;
9613 /* Compute the greatest power-of-2 divisor of op1. */
9614 align = TREE_INT_CST_LOW (op1);
9615 align &= -align;
9617 /* If align is non-zero and less than *modulus, replace
9618 *modulus with align. If align is 0, then either op1 is 0
9619 or the greatest power-of-2 divisor of op1 doesn't fit in an
9620 unsigned HOST_WIDE_INT. In either case, no additional
9621 constraint is imposed. */
9622 if (align)
9623 modulus = MIN (modulus, align);
9625 return modulus;
9630 /* If we get here, we were unable to determine anything useful about the
9631 expression. */
9632 return 1;
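/* Worked example (editorial, hypothetical declaration): given
     char buf[32] __attribute__((aligned(16)));
   the expression "&buf +p i * 4" yields residue 0 and modulus
   MIN (16, 4) = 4, i.e. the pointer is known to be 4-byte aligned
   whatever the value of i.  */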
9635 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9636 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9638 static bool
9639 vec_cst_ctor_to_array (tree arg, tree *elts)
9641 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9643 if (TREE_CODE (arg) == VECTOR_CST)
9645 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9646 elts[i] = VECTOR_CST_ELT (arg, i);
9648 else if (TREE_CODE (arg) == CONSTRUCTOR)
9650 constructor_elt *elt;
9652 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9653 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9654 return false;
9655 else
9656 elts[i] = elt->value;
9658 else
9659 return false;
9660 for (; i < nelts; i++)
9661 elts[i]
9662 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9663 return true;
9666 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9667 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9668 NULL_TREE otherwise. */
9670 static tree
9671 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9673 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9674 tree *elts;
9675 bool need_ctor = false;
9677 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9678 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9679 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9680 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9681 return NULL_TREE;
9683 elts = XALLOCAVEC (tree, nelts * 3);
9684 if (!vec_cst_ctor_to_array (arg0, elts)
9685 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9686 return NULL_TREE;
9688 for (i = 0; i < nelts; i++)
9690 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9691 need_ctor = true;
9692 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9695 if (need_ctor)
9697 vec<constructor_elt, va_gc> *v;
9698 vec_alloc (v, nelts);
9699 for (i = 0; i < nelts; i++)
9700 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9701 return build_constructor (type, v);
9703 else
9704 return build_vector (type, &elts[2 * nelts]);
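/* Worked example (editorial): with NELTS = 4, ARG0 = {a,b,c,d},
   ARG1 = {e,f,g,h} and SEL = {0,4,1,5}, the result is {a,e,b,f};
   it is a VECTOR_CST when every selected element is a constant and
   a CONSTRUCTOR otherwise.  */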
9707 /* Try to fold a pointer difference of type TYPE between two address
9708 expressions of array references AREF0 and AREF1 using location LOC. Return a
9709 simplified expression for the difference or NULL_TREE. */
9711 static tree
9712 fold_addr_of_array_ref_difference (location_t loc, tree type,
9713 tree aref0, tree aref1)
9715 tree base0 = TREE_OPERAND (aref0, 0);
9716 tree base1 = TREE_OPERAND (aref1, 0);
9717 tree base_offset = build_int_cst (type, 0);
9719 /* If the bases are array references as well, recurse. If the bases
9720 are pointer indirections compute the difference of the pointers.
9721 If the bases are equal, we are set. */
9722 if ((TREE_CODE (base0) == ARRAY_REF
9723 && TREE_CODE (base1) == ARRAY_REF
9724 && (base_offset
9725 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9726 || (INDIRECT_REF_P (base0)
9727 && INDIRECT_REF_P (base1)
9728 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9729 TREE_OPERAND (base0, 0),
9730 TREE_OPERAND (base1, 0))))
9731 || operand_equal_p (base0, base1, 0))
9733 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9734 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9735 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9736 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9737 return fold_build2_loc (loc, PLUS_EXPR, type,
9738 base_offset,
9739 fold_build2_loc (loc, MULT_EXPR, type,
9740 diff, esz));
9742 return NULL_TREE;
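/* Worked example (editorial): for "int a[8][4]", the difference
   "&a[i][2] - &a[j][1]" recurses on the bases a[i] and a[j], which
   yields base_offset = (i - j) * 16; adding (2 - 1) * 4 gives the
   byte difference between the two addresses.  */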
9745 /* If the real or vector real constant CST of type TYPE has an exact
9746 inverse, return it, else return NULL. */
9748 static tree
9749 exact_inverse (tree type, tree cst)
9751 REAL_VALUE_TYPE r;
9752 tree unit_type, *elts;
9753 enum machine_mode mode;
9754 unsigned vec_nelts, i;
9756 switch (TREE_CODE (cst))
9758 case REAL_CST:
9759 r = TREE_REAL_CST (cst);
9761 if (exact_real_inverse (TYPE_MODE (type), &r))
9762 return build_real (type, r);
9764 return NULL_TREE;
9766 case VECTOR_CST:
9767 vec_nelts = VECTOR_CST_NELTS (cst);
9768 elts = XALLOCAVEC (tree, vec_nelts);
9769 unit_type = TREE_TYPE (type);
9770 mode = TYPE_MODE (unit_type);
9772 for (i = 0; i < vec_nelts; i++)
9774 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9775 if (!exact_real_inverse (mode, &r))
9776 return NULL_TREE;
9777 elts[i] = build_real (unit_type, r);
9780 return build_vector (type, elts);
9782 default:
9783 return NULL_TREE;
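/* Examples (editorial): 2.0 has the exact binary inverse 0.5, so a
   division by 2.0 may be rewritten as a multiplication by 0.5;
   3.0 has no exact inverse, so NULL_TREE is returned and the
   division is left alone.  */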
9787 /* Fold a binary expression of code CODE and type TYPE with operands
9788 OP0 and OP1. LOC is the location of the resulting expression.
9789 Return the folded expression if folding is successful. Otherwise,
9790 return NULL_TREE. */
9792 tree
9793 fold_binary_loc (location_t loc,
9794 enum tree_code code, tree type, tree op0, tree op1)
9796 enum tree_code_class kind = TREE_CODE_CLASS (code);
9797 tree arg0, arg1, tem;
9798 tree t1 = NULL_TREE;
9799 bool strict_overflow_p;
9801 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9802 && TREE_CODE_LENGTH (code) == 2
9803 && op0 != NULL_TREE
9804 && op1 != NULL_TREE);
9806 arg0 = op0;
9807 arg1 = op1;
9809 /* Strip any conversions that don't change the mode. This is
9810 safe for every expression, except for a comparison expression
9811 because its signedness is derived from its operands. So, in
9812 the latter case, only strip conversions that don't change the
9813 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9814 preserved.
9816 Note that this is done as an internal manipulation within the
9817 constant folder, in order to find the simplest representation
9818 of the arguments so that their form can be studied. In any
9819 cases, the appropriate type conversions should be put back in
9820 the tree that will get out of the constant folder. */
9822 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9824 STRIP_SIGN_NOPS (arg0);
9825 STRIP_SIGN_NOPS (arg1);
9827 else
9829 STRIP_NOPS (arg0);
9830 STRIP_NOPS (arg1);
9833 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9834 constant but we can't do arithmetic on them. */
9835 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9836 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9837 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9838 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9839 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9840 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9842 if (kind == tcc_binary)
9844 /* Make sure type and arg0 have the same saturating flag. */
9845 gcc_assert (TYPE_SATURATING (type)
9846 == TYPE_SATURATING (TREE_TYPE (arg0)));
9847 tem = const_binop (code, arg0, arg1);
9849 else if (kind == tcc_comparison)
9850 tem = fold_relational_const (code, type, arg0, arg1);
9851 else
9852 tem = NULL_TREE;
9854 if (tem != NULL_TREE)
9856 if (TREE_TYPE (tem) != type)
9857 tem = fold_convert_loc (loc, type, tem);
9858 return tem;
9862 /* If this is a commutative operation, and ARG0 is a constant, move it
9863 to ARG1 to reduce the number of tests below. */
9864 if (commutative_tree_code (code)
9865 && tree_swap_operands_p (arg0, arg1, true))
9866 return fold_build2_loc (loc, code, type, op1, op0);
9868 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9870 First check for cases where an arithmetic operation is applied to a
9871 compound, conditional, or comparison operation. Push the arithmetic
9872 operation inside the compound or conditional to see if any folding
9873 can then be done. Convert comparison to conditional for this purpose.
9874 This also optimizes non-constant cases that used to be done in
9875 expand_expr.
9877 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9878 where one of the operands is a comparison and the other is a comparison, a
9879 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9880 code below would make the expression more complex. Change it to a
9881 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9882 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9884 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9885 || code == EQ_EXPR || code == NE_EXPR)
9886 && TREE_CODE (type) != VECTOR_TYPE
9887 && ((truth_value_p (TREE_CODE (arg0))
9888 && (truth_value_p (TREE_CODE (arg1))
9889 || (TREE_CODE (arg1) == BIT_AND_EXPR
9890 && integer_onep (TREE_OPERAND (arg1, 1)))))
9891 || (truth_value_p (TREE_CODE (arg1))
9892 && (truth_value_p (TREE_CODE (arg0))
9893 || (TREE_CODE (arg0) == BIT_AND_EXPR
9894 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9896 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9897 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9898 : TRUTH_XOR_EXPR,
9899 boolean_type_node,
9900 fold_convert_loc (loc, boolean_type_node, arg0),
9901 fold_convert_loc (loc, boolean_type_node, arg1));
9903 if (code == EQ_EXPR)
9904 tem = invert_truthvalue_loc (loc, tem);
9906 return fold_convert_loc (loc, type, tem);
9909 if (TREE_CODE_CLASS (code) == tcc_binary
9910 || TREE_CODE_CLASS (code) == tcc_comparison)
9912 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9914 tem = fold_build2_loc (loc, code, type,
9915 fold_convert_loc (loc, TREE_TYPE (op0),
9916 TREE_OPERAND (arg0, 1)), op1);
9917 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9918 tem);
9920 if (TREE_CODE (arg1) == COMPOUND_EXPR
9921 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9923 tem = fold_build2_loc (loc, code, type, op0,
9924 fold_convert_loc (loc, TREE_TYPE (op1),
9925 TREE_OPERAND (arg1, 1)));
9926 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9927 tem);
9930 if (TREE_CODE (arg0) == COND_EXPR
9931 || TREE_CODE (arg0) == VEC_COND_EXPR
9932 || COMPARISON_CLASS_P (arg0))
9934 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9935 arg0, arg1,
9936 /*cond_first_p=*/1);
9937 if (tem != NULL_TREE)
9938 return tem;
9941 if (TREE_CODE (arg1) == COND_EXPR
9942 || TREE_CODE (arg1) == VEC_COND_EXPR
9943 || COMPARISON_CLASS_P (arg1))
9945 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9946 arg1, arg0,
9947 /*cond_first_p=*/0);
9948 if (tem != NULL_TREE)
9949 return tem;
9953 switch (code)
9955 case MEM_REF:
9956 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9957 if (TREE_CODE (arg0) == ADDR_EXPR
9958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9960 tree iref = TREE_OPERAND (arg0, 0);
9961 return fold_build2 (MEM_REF, type,
9962 TREE_OPERAND (iref, 0),
9963 int_const_binop (PLUS_EXPR, arg1,
9964 TREE_OPERAND (iref, 1)));
9967 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9968 if (TREE_CODE (arg0) == ADDR_EXPR
9969 && handled_component_p (TREE_OPERAND (arg0, 0)))
9971 tree base;
9972 HOST_WIDE_INT coffset;
9973 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9974 &coffset);
9975 if (!base)
9976 return NULL_TREE;
9977 return fold_build2 (MEM_REF, type,
9978 build_fold_addr_expr (base),
9979 int_const_binop (PLUS_EXPR, arg1,
9980 size_int (coffset)));
9983 return NULL_TREE;
9985 case POINTER_PLUS_EXPR:
9986 /* 0 +p index -> (type)index */
9987 if (integer_zerop (arg0))
9988 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9990 /* PTR +p 0 -> PTR */
9991 if (integer_zerop (arg1))
9992 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9994 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9995 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9996 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9997 return fold_convert_loc (loc, type,
9998 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9999 fold_convert_loc (loc, sizetype,
10000 arg1),
10001 fold_convert_loc (loc, sizetype,
10002 arg0)));
10004 /* (PTR +p B) +p A -> PTR +p (B + A) */
10005 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10007 tree inner;
10008 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10009 tree arg00 = TREE_OPERAND (arg0, 0);
10010 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10011 arg01, fold_convert_loc (loc, sizetype, arg1));
10012 return fold_convert_loc (loc, type,
10013 fold_build_pointer_plus_loc (loc,
10014 arg00, inner));
10017 /* PTR_CST +p CST -> CST1 */
10018 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10019 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10020 fold_convert_loc (loc, type, arg1));
10022 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10023 of the array. The loop optimizer sometimes produces this type of
10024 expression. */
10025 if (TREE_CODE (arg0) == ADDR_EXPR)
10027 tem = try_move_mult_to_index (loc, arg0,
10028 fold_convert_loc (loc,
10029 ssizetype, arg1));
10030 if (tem)
10031 return fold_convert_loc (loc, type, tem);
10034 return NULL_TREE;
10036 case PLUS_EXPR:
10037 /* A + (-B) -> A - B */
10038 if (TREE_CODE (arg1) == NEGATE_EXPR)
10039 return fold_build2_loc (loc, MINUS_EXPR, type,
10040 fold_convert_loc (loc, type, arg0),
10041 fold_convert_loc (loc, type,
10042 TREE_OPERAND (arg1, 0)));
10043 /* (-A) + B -> B - A */
10044 if (TREE_CODE (arg0) == NEGATE_EXPR
10045 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10046 return fold_build2_loc (loc, MINUS_EXPR, type,
10047 fold_convert_loc (loc, type, arg1),
10048 fold_convert_loc (loc, type,
10049 TREE_OPERAND (arg0, 0)));
10051 if (INTEGRAL_TYPE_P (type))
10053 /* Convert ~A + 1 to -A. */
10054 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10055 && integer_onep (arg1))
10056 return fold_build1_loc (loc, NEGATE_EXPR, type,
10057 fold_convert_loc (loc, type,
10058 TREE_OPERAND (arg0, 0)));
10060 /* ~X + X is -1. */
10061 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10062 && !TYPE_OVERFLOW_TRAPS (type))
10064 tree tem = TREE_OPERAND (arg0, 0);
10066 STRIP_NOPS (tem);
10067 if (operand_equal_p (tem, arg1, 0))
10069 t1 = build_int_cst_type (type, -1);
10070 return omit_one_operand_loc (loc, type, t1, arg1);
10074 /* X + ~X is -1. */
10075 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10076 && !TYPE_OVERFLOW_TRAPS (type))
10078 tree tem = TREE_OPERAND (arg1, 0);
10080 STRIP_NOPS (tem);
10081 if (operand_equal_p (arg0, tem, 0))
10083 t1 = build_int_cst_type (type, -1);
10084 return omit_one_operand_loc (loc, type, t1, arg0);
10088 /* X + (X / CST) * -CST is X % CST. */
10089 if (TREE_CODE (arg1) == MULT_EXPR
10090 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10091 && operand_equal_p (arg0,
10092 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10094 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10095 tree cst1 = TREE_OPERAND (arg1, 1);
10096 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10097 cst1, cst0);
10098 if (sum && integer_zerop (sum))
10099 return fold_convert_loc (loc, type,
10100 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10101 TREE_TYPE (arg0), arg0,
10102 cst0));
10106 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10107 one. Make sure the type is not saturating and has the signedness of
10108 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10109 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10110 if ((TREE_CODE (arg0) == MULT_EXPR
10111 || TREE_CODE (arg1) == MULT_EXPR)
10112 && !TYPE_SATURATING (type)
10113 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10114 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10115 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10117 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10118 if (tem)
10119 return tem;
10122 if (! FLOAT_TYPE_P (type))
10124 if (integer_zerop (arg1))
10125 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10127 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10128 with a constant, and the two constants have no bits in common,
10129 we should treat this as a BIT_IOR_EXPR since this may produce more
10130 simplifications. */
10131 if (TREE_CODE (arg0) == BIT_AND_EXPR
10132 && TREE_CODE (arg1) == BIT_AND_EXPR
10133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10134 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10135 && integer_zerop (const_binop (BIT_AND_EXPR,
10136 TREE_OPERAND (arg0, 1),
10137 TREE_OPERAND (arg1, 1))))
10139 code = BIT_IOR_EXPR;
10140 goto bit_ior;
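/* Worked example (editorial): "(X & 0xF0) + (Y & 0x0F)" adds values
   with disjoint masks, so it is handled as
   "(X & 0xF0) | (Y & 0x0F)", which the BIT_IOR_EXPR code may
   simplify further.  */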
10143 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10144 (plus (plus (mult) (mult)) (foo)) so that we can
10145 take advantage of the factoring cases below. */
10146 if (TYPE_OVERFLOW_WRAPS (type)
10147 && (((TREE_CODE (arg0) == PLUS_EXPR
10148 || TREE_CODE (arg0) == MINUS_EXPR)
10149 && TREE_CODE (arg1) == MULT_EXPR)
10150 || ((TREE_CODE (arg1) == PLUS_EXPR
10151 || TREE_CODE (arg1) == MINUS_EXPR)
10152 && TREE_CODE (arg0) == MULT_EXPR)))
10154 tree parg0, parg1, parg, marg;
10155 enum tree_code pcode;
10157 if (TREE_CODE (arg1) == MULT_EXPR)
10158 parg = arg0, marg = arg1;
10159 else
10160 parg = arg1, marg = arg0;
10161 pcode = TREE_CODE (parg);
10162 parg0 = TREE_OPERAND (parg, 0);
10163 parg1 = TREE_OPERAND (parg, 1);
10164 STRIP_NOPS (parg0);
10165 STRIP_NOPS (parg1);
10167 if (TREE_CODE (parg0) == MULT_EXPR
10168 && TREE_CODE (parg1) != MULT_EXPR)
10169 return fold_build2_loc (loc, pcode, type,
10170 fold_build2_loc (loc, PLUS_EXPR, type,
10171 fold_convert_loc (loc, type,
10172 parg0),
10173 fold_convert_loc (loc, type,
10174 marg)),
10175 fold_convert_loc (loc, type, parg1));
10176 if (TREE_CODE (parg0) != MULT_EXPR
10177 && TREE_CODE (parg1) == MULT_EXPR)
10178 return
10179 fold_build2_loc (loc, PLUS_EXPR, type,
10180 fold_convert_loc (loc, type, parg0),
10181 fold_build2_loc (loc, pcode, type,
10182 fold_convert_loc (loc, type, marg),
10183 fold_convert_loc (loc, type,
10184 parg1)));
10187 else
10189 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10190 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10191 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10193 /* Likewise if the operands are reversed. */
10194 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10195 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10197 /* Convert X + -C into X - C. */
10198 if (TREE_CODE (arg1) == REAL_CST
10199 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10201 tem = fold_negate_const (arg1, type);
10202 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10203 return fold_build2_loc (loc, MINUS_EXPR, type,
10204 fold_convert_loc (loc, type, arg0),
10205 fold_convert_loc (loc, type, tem));
10208 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10209 to __complex__ ( x, y ). This is not the same for SNaNs or
10210 if signed zeros are involved. */
10211 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10212 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10213 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10215 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10216 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10217 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10218 bool arg0rz = false, arg0iz = false;
10219 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10220 || (arg0i && (arg0iz = real_zerop (arg0i))))
10222 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10223 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10224 if (arg0rz && arg1i && real_zerop (arg1i))
10226 tree rp = arg1r ? arg1r
10227 : build1 (REALPART_EXPR, rtype, arg1);
10228 tree ip = arg0i ? arg0i
10229 : build1 (IMAGPART_EXPR, rtype, arg0);
10230 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10232 else if (arg0iz && arg1r && real_zerop (arg1r))
10234 tree rp = arg0r ? arg0r
10235 : build1 (REALPART_EXPR, rtype, arg0);
10236 tree ip = arg1i ? arg1i
10237 : build1 (IMAGPART_EXPR, rtype, arg1);
10238 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10243 if (flag_unsafe_math_optimizations
10244 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10245 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10246 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10247 return tem;
10249 /* Convert x+x into x*2.0. */
10250 if (operand_equal_p (arg0, arg1, 0)
10251 && SCALAR_FLOAT_TYPE_P (type))
10252 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10253 build_real (type, dconst2));
10255 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10256 We associate floats only if the user has specified
10257 -fassociative-math. */
10258 if (flag_associative_math
10259 && TREE_CODE (arg1) == PLUS_EXPR
10260 && TREE_CODE (arg0) != MULT_EXPR)
10262 tree tree10 = TREE_OPERAND (arg1, 0);
10263 tree tree11 = TREE_OPERAND (arg1, 1);
10264 if (TREE_CODE (tree11) == MULT_EXPR
10265 && TREE_CODE (tree10) == MULT_EXPR)
10267 tree tree0;
10268 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10269 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10272 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10273 We associate floats only if the user has specified
10274 -fassociative-math. */
10275 if (flag_associative_math
10276 && TREE_CODE (arg0) == PLUS_EXPR
10277 && TREE_CODE (arg1) != MULT_EXPR)
10279 tree tree00 = TREE_OPERAND (arg0, 0);
10280 tree tree01 = TREE_OPERAND (arg0, 1);
10281 if (TREE_CODE (tree01) == MULT_EXPR
10282 && TREE_CODE (tree00) == MULT_EXPR)
10284 tree tree0;
10285 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10286 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10291 bit_rotate:
10292 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10293 is a rotate of A by C1 bits. */
10294 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10295 is a rotate of A by B bits. */
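/* Worked example (editorial): for a 32-bit unsigned X, both
   "(X << 3) + (X >> 29)" and "(X << B) + (X >> (32 - B))" are
   recognized below as a left rotate of X.  */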
10297 enum tree_code code0, code1;
10298 tree rtype;
10299 code0 = TREE_CODE (arg0);
10300 code1 = TREE_CODE (arg1);
10301 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10302 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10303 && operand_equal_p (TREE_OPERAND (arg0, 0),
10304 TREE_OPERAND (arg1, 0), 0)
10305 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10306 TYPE_UNSIGNED (rtype))
10307 /* Only create rotates in complete modes. Other cases are not
10308 expanded properly. */
10309 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10311 tree tree01, tree11;
10312 enum tree_code code01, code11;
10314 tree01 = TREE_OPERAND (arg0, 1);
10315 tree11 = TREE_OPERAND (arg1, 1);
10316 STRIP_NOPS (tree01);
10317 STRIP_NOPS (tree11);
10318 code01 = TREE_CODE (tree01);
10319 code11 = TREE_CODE (tree11);
10320 if (code01 == INTEGER_CST
10321 && code11 == INTEGER_CST
10322 && TREE_INT_CST_HIGH (tree01) == 0
10323 && TREE_INT_CST_HIGH (tree11) == 0
10324 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10325 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10327 tem = build2_loc (loc, LROTATE_EXPR,
10328 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10329 TREE_OPERAND (arg0, 0),
10330 code0 == LSHIFT_EXPR ? tree01 : tree11);
10331 return fold_convert_loc (loc, type, tem);
10333 else if (code11 == MINUS_EXPR)
10335 tree tree110, tree111;
10336 tree110 = TREE_OPERAND (tree11, 0);
10337 tree111 = TREE_OPERAND (tree11, 1);
10338 STRIP_NOPS (tree110);
10339 STRIP_NOPS (tree111);
10340 if (TREE_CODE (tree110) == INTEGER_CST
10341 && 0 == compare_tree_int (tree110,
10342 TYPE_PRECISION
10343 (TREE_TYPE (TREE_OPERAND
10344 (arg0, 0))))
10345 && operand_equal_p (tree01, tree111, 0))
10346 return
10347 fold_convert_loc (loc, type,
10348 build2 ((code0 == LSHIFT_EXPR
10349 ? LROTATE_EXPR
10350 : RROTATE_EXPR),
10351 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10352 TREE_OPERAND (arg0, 0), tree01));
10354 else if (code01 == MINUS_EXPR)
10356 tree tree010, tree011;
10357 tree010 = TREE_OPERAND (tree01, 0);
10358 tree011 = TREE_OPERAND (tree01, 1);
10359 STRIP_NOPS (tree010);
10360 STRIP_NOPS (tree011);
10361 if (TREE_CODE (tree010) == INTEGER_CST
10362 && 0 == compare_tree_int (tree010,
10363 TYPE_PRECISION
10364 (TREE_TYPE (TREE_OPERAND
10365 (arg0, 0))))
10366 && operand_equal_p (tree11, tree011, 0))
10367 return fold_convert_loc
10368 (loc, type,
10369 build2 ((code0 != LSHIFT_EXPR
10370 ? LROTATE_EXPR
10371 : RROTATE_EXPR),
10372 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10373 TREE_OPERAND (arg0, 0), tree11));
10378 associate:
10379 /* In most languages, we can't associate operations on floats through
10380 parentheses. Rather than remember where the parentheses were, we
10381 don't associate floats at all, unless the user has specified
10382 -fassociative-math.
10383 And, we need to make sure type is not saturating. */
10385 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10386 && !TYPE_SATURATING (type))
10388 tree var0, con0, lit0, minus_lit0;
10389 tree var1, con1, lit1, minus_lit1;
10390 tree atype = type;
10391 bool ok = true;
10393 /* Split both trees into variables, constants, and literals. Then
10394 associate each group together, the constants with literals,
10395 then the result with variables. This increases the chances of
10396 literals being recombined later and of generating relocatable
10397 expressions for the sum of a constant and literal. */
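/* Worked example (editorial): "(X + 1) + (Y + 2)" splits into the
   variables X, Y and the literals 1, 2; associating the groups
   yields "(X + Y) + 3".  */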
10398 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10399 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10400 code == MINUS_EXPR);
10402 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10403 if (code == MINUS_EXPR)
10404 code = PLUS_EXPR;
10406 /* With undefined overflow prefer doing association in a type
10407 which wraps on overflow, if that is one of the operand types. */
10408 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10409 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10411 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10412 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10413 atype = TREE_TYPE (arg0);
10414 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10415 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10416 atype = TREE_TYPE (arg1);
10417 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10420 /* With undefined overflow we can only associate constants with one
10421 variable, and constants whose association doesn't overflow. */
10422 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10423 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10425 if (var0 && var1)
10427 tree tmp0 = var0;
10428 tree tmp1 = var1;
10430 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10431 tmp0 = TREE_OPERAND (tmp0, 0);
10432 if (CONVERT_EXPR_P (tmp0)
10433 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10434 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10435 <= TYPE_PRECISION (atype)))
10436 tmp0 = TREE_OPERAND (tmp0, 0);
10437 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10438 tmp1 = TREE_OPERAND (tmp1, 0);
10439 if (CONVERT_EXPR_P (tmp1)
10440 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10441 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10442 <= TYPE_PRECISION (atype)))
10443 tmp1 = TREE_OPERAND (tmp1, 0);
10444 /* The only case we can still associate with two variables
10445 is if they are the same, modulo negation and bit-pattern
10446 preserving conversions. */
10447 if (!operand_equal_p (tmp0, tmp1, 0))
10448 ok = false;
10452 /* Only do something if we found more than two objects. Otherwise,
10453 nothing has changed and we risk infinite recursion. */
10454 if (ok
10455 && (2 < ((var0 != 0) + (var1 != 0)
10456 + (con0 != 0) + (con1 != 0)
10457 + (lit0 != 0) + (lit1 != 0)
10458 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10460 bool any_overflows = false;
10461 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10462 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10463 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10464 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10465 var0 = associate_trees (loc, var0, var1, code, atype);
10466 con0 = associate_trees (loc, con0, con1, code, atype);
10467 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10468 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10469 code, atype);
10471 /* Preserve the MINUS_EXPR if the negative part of the literal is
10472 greater than the positive part. Otherwise, the multiplicative
10473 folding code (i.e. extract_muldiv) may be fooled in case
10474 unsigned constants are subtracted, like in the following
10475 example: ((X*2 + 4) - 8U)/2. */
10476 if (minus_lit0 && lit0)
10478 if (TREE_CODE (lit0) == INTEGER_CST
10479 && TREE_CODE (minus_lit0) == INTEGER_CST
10480 && tree_int_cst_lt (lit0, minus_lit0))
10482 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10483 MINUS_EXPR, atype);
10484 lit0 = 0;
10486 else
10488 lit0 = associate_trees (loc, lit0, minus_lit0,
10489 MINUS_EXPR, atype);
10490 minus_lit0 = 0;
10494 /* Don't introduce overflows through reassociation. */
10495 if (!any_overflows
10496 && ((lit0 && TREE_OVERFLOW (lit0))
10497 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10498 return NULL_TREE;
10500 if (minus_lit0)
10502 if (con0 == 0)
10503 return
10504 fold_convert_loc (loc, type,
10505 associate_trees (loc, var0, minus_lit0,
10506 MINUS_EXPR, atype));
10507 else
10509 con0 = associate_trees (loc, con0, minus_lit0,
10510 MINUS_EXPR, atype);
10511 return
10512 fold_convert_loc (loc, type,
10513 associate_trees (loc, var0, con0,
10514 PLUS_EXPR, atype));
10518 con0 = associate_trees (loc, con0, lit0, code, atype);
10519 return
10520 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10521 code, atype));
10525 return NULL_TREE;
10527 case MINUS_EXPR:
10528 /* Pointer simplifications for subtraction, simple reassociations. */
10529 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10531 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10532 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10533 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10535 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10536 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10537 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10538 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10539 return fold_build2_loc (loc, PLUS_EXPR, type,
10540 fold_build2_loc (loc, MINUS_EXPR, type,
10541 arg00, arg10),
10542 fold_build2_loc (loc, MINUS_EXPR, type,
10543 arg01, arg11));
10545 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10546 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10548 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10549 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10550 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10551 fold_convert_loc (loc, type, arg1));
10552 if (tmp)
10553 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10556 /* A - (-B) -> A + B */
10557 if (TREE_CODE (arg1) == NEGATE_EXPR)
10558 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10559 fold_convert_loc (loc, type,
10560 TREE_OPERAND (arg1, 0)));
10561 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10562 if (TREE_CODE (arg0) == NEGATE_EXPR
10563 && (FLOAT_TYPE_P (type)
10564 || INTEGRAL_TYPE_P (type))
10565 && negate_expr_p (arg1)
10566 && reorder_operands_p (arg0, arg1))
10567 return fold_build2_loc (loc, MINUS_EXPR, type,
10568 fold_convert_loc (loc, type,
10569 negate_expr (arg1)),
10570 fold_convert_loc (loc, type,
10571 TREE_OPERAND (arg0, 0)));
10572 /* Convert -A - 1 to ~A. */
10573 if (INTEGRAL_TYPE_P (type)
10574 && TREE_CODE (arg0) == NEGATE_EXPR
10575 && integer_onep (arg1)
10576 && !TYPE_OVERFLOW_TRAPS (type))
10577 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10578 fold_convert_loc (loc, type,
10579 TREE_OPERAND (arg0, 0)));
10581 /* Convert -1 - A to ~A. */
10582 if (INTEGRAL_TYPE_P (type)
10583 && integer_all_onesp (arg0))
10584 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10587 /* X - (X / CST) * CST is X % CST. */
10588 if (INTEGRAL_TYPE_P (type)
10589 && TREE_CODE (arg1) == MULT_EXPR
10590 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10591 && operand_equal_p (arg0,
10592 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10593 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10594 TREE_OPERAND (arg1, 1), 0))
10595 return
10596 fold_convert_loc (loc, type,
10597 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10598 arg0, TREE_OPERAND (arg1, 1)));
10600 if (! FLOAT_TYPE_P (type))
10602 if (integer_zerop (arg0))
10603 return negate_expr (fold_convert_loc (loc, type, arg1));
10604 if (integer_zerop (arg1))
10605 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10607 /* Fold A - (A & B) into ~B & A. */
10608 if (!TREE_SIDE_EFFECTS (arg0)
10609 && TREE_CODE (arg1) == BIT_AND_EXPR)
10611 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10613 tree arg10 = fold_convert_loc (loc, type,
10614 TREE_OPERAND (arg1, 0));
10615 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10616 fold_build1_loc (loc, BIT_NOT_EXPR,
10617 type, arg10),
10618 fold_convert_loc (loc, type, arg0));
10620 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10622 tree arg11 = fold_convert_loc (loc,
10623 type, TREE_OPERAND (arg1, 1));
10624 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10625 fold_build1_loc (loc, BIT_NOT_EXPR,
10626 type, arg11),
10627 fold_convert_loc (loc, type, arg0));
10631 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10632 any power of 2 minus 1. */
10633 if (TREE_CODE (arg0) == BIT_AND_EXPR
10634 && TREE_CODE (arg1) == BIT_AND_EXPR
10635 && operand_equal_p (TREE_OPERAND (arg0, 0),
10636 TREE_OPERAND (arg1, 0), 0))
10638 tree mask0 = TREE_OPERAND (arg0, 1);
10639 tree mask1 = TREE_OPERAND (arg1, 1);
10640 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10642 if (operand_equal_p (tem, mask1, 0))
10644 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10645 TREE_OPERAND (arg0, 0), mask1);
10646 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10651 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10652 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10653 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10655 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10656 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10657 (-ARG1 + ARG0) reduces to -ARG1. */
10658 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10659 return negate_expr (fold_convert_loc (loc, type, arg1));
10661 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10662 __complex__ ( x, -y ). This is not the same for SNaNs or if
10663 signed zeros are involved. */
10664 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10665 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10666 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10668 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10669 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10670 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10671 bool arg0rz = false, arg0iz = false;
10672 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10673 || (arg0i && (arg0iz = real_zerop (arg0i))))
10675 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10676 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10677 if (arg0rz && arg1i && real_zerop (arg1i))
10679 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10680 arg1r ? arg1r
10681 : build1 (REALPART_EXPR, rtype, arg1));
10682 tree ip = arg0i ? arg0i
10683 : build1 (IMAGPART_EXPR, rtype, arg0);
10684 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10686 else if (arg0iz && arg1r && real_zerop (arg1r))
10688 tree rp = arg0r ? arg0r
10689 : build1 (REALPART_EXPR, rtype, arg0);
10690 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10691 arg1i ? arg1i
10692 : build1 (IMAGPART_EXPR, rtype, arg1));
10693 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10698 /* Fold &x - &x. This can happen from &x.foo - &x.
10699 This is unsafe for certain floats even in non-IEEE formats.
10700 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10701 Also note that operand_equal_p is always false if an operand
10702 is volatile. */
10704 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10705 && operand_equal_p (arg0, arg1, 0))
10706 return build_zero_cst (type);
10708 /* A - B -> A + (-B) if B is easily negatable. */
10709 if (negate_expr_p (arg1)
10710 && ((FLOAT_TYPE_P (type)
10711 /* Avoid this transformation if B is a positive REAL_CST. */
10712 && (TREE_CODE (arg1) != REAL_CST
10713 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10714 || INTEGRAL_TYPE_P (type)))
10715 return fold_build2_loc (loc, PLUS_EXPR, type,
10716 fold_convert_loc (loc, type, arg0),
10717 fold_convert_loc (loc, type,
10718 negate_expr (arg1)));
10720 /* Try folding difference of addresses. */
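      /* E.g. for char a[10], &a[7] - &a[2] folds to the constant 5.  */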
10722 HOST_WIDE_INT diff;
10724 if ((TREE_CODE (arg0) == ADDR_EXPR
10725 || TREE_CODE (arg1) == ADDR_EXPR)
10726 && ptr_difference_const (arg0, arg1, &diff))
10727 return build_int_cst_type (type, diff);
10730 /* Fold &a[i] - &a[j] to i-j. */
10731 if (TREE_CODE (arg0) == ADDR_EXPR
10732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10733 && TREE_CODE (arg1) == ADDR_EXPR
10734 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10736 tree tem = fold_addr_of_array_ref_difference (loc, type,
10737 TREE_OPERAND (arg0, 0),
10738 TREE_OPERAND (arg1, 0));
10739 if (tem)
10740 return tem;
10743 if (FLOAT_TYPE_P (type)
10744 && flag_unsafe_math_optimizations
10745 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10746 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10747 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10748 return tem;
10750 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10751 one. Make sure the type is not saturating and has the signedness of
10752 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10753 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10754 if ((TREE_CODE (arg0) == MULT_EXPR
10755 || TREE_CODE (arg1) == MULT_EXPR)
10756 && !TYPE_SATURATING (type)
10757 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10758 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10759 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10761 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10762 if (tem)
10763 return tem;
10766 goto associate;
10768 case MULT_EXPR:
10769 /* (-A) * (-B) -> A * B */
10770 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10771 return fold_build2_loc (loc, MULT_EXPR, type,
10772 fold_convert_loc (loc, type,
10773 TREE_OPERAND (arg0, 0)),
10774 fold_convert_loc (loc, type,
10775 negate_expr (arg1)));
10776 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10777 return fold_build2_loc (loc, MULT_EXPR, type,
10778 fold_convert_loc (loc, type,
10779 negate_expr (arg0)),
10780 fold_convert_loc (loc, type,
10781 TREE_OPERAND (arg1, 0)));
10783 if (! FLOAT_TYPE_P (type))
10785 if (integer_zerop (arg1))
10786 return omit_one_operand_loc (loc, type, arg1, arg0);
10787 if (integer_onep (arg1))
10788 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10789 /* Transform x * -1 into -x. Make sure to do the negation
10790 on the original operand with conversions not stripped
10791 because we can only strip non-sign-changing conversions. */
10792 if (integer_all_onesp (arg1))
10793 return fold_convert_loc (loc, type, negate_expr (op0));
10794 /* Transform x * -C into -x * C if x is easily negatable. */
10795 if (TREE_CODE (arg1) == INTEGER_CST
10796 && tree_int_cst_sgn (arg1) == -1
10797 && negate_expr_p (arg0)
10798 && (tem = negate_expr (arg1)) != arg1
10799 && !TREE_OVERFLOW (tem))
10800 return fold_build2_loc (loc, MULT_EXPR, type,
10801 fold_convert_loc (loc, type,
10802 negate_expr (arg0)),
10803 tem);
10805 /* (a * (1 << b)) is (a << b) */
10806 if (TREE_CODE (arg1) == LSHIFT_EXPR
10807 && integer_onep (TREE_OPERAND (arg1, 0)))
10808 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10809 TREE_OPERAND (arg1, 1));
10810 if (TREE_CODE (arg0) == LSHIFT_EXPR
10811 && integer_onep (TREE_OPERAND (arg0, 0)))
10812 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10813 TREE_OPERAND (arg0, 1));
10815 /* (A + A) * C -> A * 2 * C */
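      /* E.g. (X + X) * 3 folds to X * 6.  */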
10816 if (TREE_CODE (arg0) == PLUS_EXPR
10817 && TREE_CODE (arg1) == INTEGER_CST
10818 && operand_equal_p (TREE_OPERAND (arg0, 0),
10819 TREE_OPERAND (arg0, 1), 0))
10820 return fold_build2_loc (loc, MULT_EXPR, type,
10821 omit_one_operand_loc (loc, type,
10822 TREE_OPERAND (arg0, 0),
10823 TREE_OPERAND (arg0, 1)),
10824 fold_build2_loc (loc, MULT_EXPR, type,
10825 build_int_cst (type, 2) , arg1));
10827 strict_overflow_p = false;
10828 if (TREE_CODE (arg1) == INTEGER_CST
10829 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10830 &strict_overflow_p)))
10832 if (strict_overflow_p)
10833 fold_overflow_warning (("assuming signed overflow does not "
10834 "occur when simplifying "
10835 "multiplication"),
10836 WARN_STRICT_OVERFLOW_MISC);
10837 return fold_convert_loc (loc, type, tem);
10840 /* Optimize z * conj(z) for integer complex numbers. */
10841 if (TREE_CODE (arg0) == CONJ_EXPR
10842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10843 return fold_mult_zconjz (loc, type, arg1);
10844 if (TREE_CODE (arg1) == CONJ_EXPR
10845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10846 return fold_mult_zconjz (loc, type, arg0);
10848 else
10850 /* Maybe fold x * 0 to 0. The expressions aren't the same
10851 when x is NaN, since x * 0 is also NaN. Nor are they the
10852 same in modes with signed zeros, since multiplying a
10853 negative value by 0 gives -0, not +0. */
10854 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10855 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10856 && real_zerop (arg1))
10857 return omit_one_operand_loc (loc, type, arg1, arg0);
10858 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10859 Likewise for complex arithmetic with signed zeros. */
10860 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10861 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10862 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10863 && real_onep (arg1))
10864 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10866 /* Transform x * -1.0 into -x. */
10867 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10868 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10869 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10870 && real_minus_onep (arg1))
10871 return fold_convert_loc (loc, type, negate_expr (arg0));
10873 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10874 the result for floating point types due to rounding, so it is applied
10875 only if -fassociative-math was specified. */
10876 if (flag_associative_math
10877 && TREE_CODE (arg0) == RDIV_EXPR
10878 && TREE_CODE (arg1) == REAL_CST
10879 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10881 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10882 arg1);
10883 if (tem)
10884 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10885 TREE_OPERAND (arg0, 1));
10888 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10889 if (operand_equal_p (arg0, arg1, 0))
10891 tree tem = fold_strip_sign_ops (arg0);
10892 if (tem != NULL_TREE)
10894 tem = fold_convert_loc (loc, type, tem);
10895 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10899 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10900 This is not the same for NaNs or if signed zeros are
10901 involved. */
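      /* E.g. (a + b*I) * I folds to __complex__ (-b, a).  */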
10902 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10903 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10904 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10905 && TREE_CODE (arg1) == COMPLEX_CST
10906 && real_zerop (TREE_REALPART (arg1)))
10908 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10909 if (real_onep (TREE_IMAGPART (arg1)))
10910 return
10911 fold_build2_loc (loc, COMPLEX_EXPR, type,
10912 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10913 rtype, arg0)),
10914 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10915 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10916 return
10917 fold_build2_loc (loc, COMPLEX_EXPR, type,
10918 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10919 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10920 rtype, arg0)));
10923 /* Optimize z * conj(z) for floating point complex numbers.
10924 Guarded by flag_unsafe_math_optimizations as non-finite
10925 imaginary components don't produce scalar results. */
10926 if (flag_unsafe_math_optimizations
10927 && TREE_CODE (arg0) == CONJ_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10929 return fold_mult_zconjz (loc, type, arg1);
10930 if (flag_unsafe_math_optimizations
10931 && TREE_CODE (arg1) == CONJ_EXPR
10932 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10933 return fold_mult_zconjz (loc, type, arg0);
10935 if (flag_unsafe_math_optimizations)
10937 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10938 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10940 /* Optimizations of root(...)*root(...). */
10941 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10943 tree rootfn, arg;
10944 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10945 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10947 /* Optimize sqrt(x)*sqrt(x) as x. */
10948 if (BUILTIN_SQRT_P (fcode0)
10949 && operand_equal_p (arg00, arg10, 0)
10950 && ! HONOR_SNANS (TYPE_MODE (type)))
10951 return arg00;
10953 /* Optimize root(x)*root(y) as root(x*y). */
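	  /* This is only safe under -funsafe-math-optimizations: e.g.
	     sqrt (-4.0) * sqrt (-9.0) is NaN, but sqrt (-4.0 * -9.0)
	     is 6.0.  */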
10954 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10955 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10956 return build_call_expr_loc (loc, rootfn, 1, arg);
10959 /* Optimize expN(x)*expN(y) as expN(x+y). */
10960 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10962 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10963 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10964 CALL_EXPR_ARG (arg0, 0),
10965 CALL_EXPR_ARG (arg1, 0));
10966 return build_call_expr_loc (loc, expfn, 1, arg);
10969 /* Optimizations of pow(...)*pow(...). */
10970 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10971 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10972 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10974 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10975 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10976 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10977 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10979 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10980 if (operand_equal_p (arg01, arg11, 0))
10982 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10983 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10984 arg00, arg10);
10985 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10988 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10989 if (operand_equal_p (arg00, arg10, 0))
10991 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10992 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10993 arg01, arg11);
10994 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10998 /* Optimize tan(x)*cos(x) as sin(x). */
10999 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11000 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11001 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11002 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11003 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11004 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11005 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11006 CALL_EXPR_ARG (arg1, 0), 0))
11008 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11010 if (sinfn != NULL_TREE)
11011 return build_call_expr_loc (loc, sinfn, 1,
11012 CALL_EXPR_ARG (arg0, 0));
11015 /* Optimize x*pow(x,c) as pow(x,c+1). */
11016 if (fcode1 == BUILT_IN_POW
11017 || fcode1 == BUILT_IN_POWF
11018 || fcode1 == BUILT_IN_POWL)
11020 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11021 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11022 if (TREE_CODE (arg11) == REAL_CST
11023 && !TREE_OVERFLOW (arg11)
11024 && operand_equal_p (arg0, arg10, 0))
11026 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11027 REAL_VALUE_TYPE c;
11028 tree arg;
11030 c = TREE_REAL_CST (arg11);
11031 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11032 arg = build_real (type, c);
11033 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11037 /* Optimize pow(x,c)*x as pow(x,c+1). */
11038 if (fcode0 == BUILT_IN_POW
11039 || fcode0 == BUILT_IN_POWF
11040 || fcode0 == BUILT_IN_POWL)
11042 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11043 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11044 if (TREE_CODE (arg01) == REAL_CST
11045 && !TREE_OVERFLOW (arg01)
11046 && operand_equal_p (arg1, arg00, 0))
11048 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11049 REAL_VALUE_TYPE c;
11050 tree arg;
11052 c = TREE_REAL_CST (arg01);
11053 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11054 arg = build_real (type, c);
11055 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11059 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11060 if (!in_gimple_form
11061 && optimize
11062 && operand_equal_p (arg0, arg1, 0))
11064 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11066 if (powfn)
11068 tree arg = build_real (type, dconst2);
11069 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11074 goto associate;
11076 case BIT_IOR_EXPR:
11077 bit_ior:
11078 if (integer_all_onesp (arg1))
11079 return omit_one_operand_loc (loc, type, arg1, arg0);
11080 if (integer_zerop (arg1))
11081 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11082 if (operand_equal_p (arg0, arg1, 0))
11083 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11085 /* ~X | X is -1. */
11086 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11089 t1 = build_zero_cst (type);
11090 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11091 return omit_one_operand_loc (loc, type, t1, arg1);
11094 /* X | ~X is -1. */
11095 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11098 t1 = build_zero_cst (type);
11099 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11100 return omit_one_operand_loc (loc, type, t1, arg0);
11103 /* Canonicalize (X & C1) | C2. */
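      /* E.g. for an 8-bit type, (X & 0x0f) | 0xff folds to 0xff because
	 (C1 & C2) == C1, and (X & 0xf0) | 0x0f folds to X | 0x0f because
	 C1 | C2 covers every bit of the mode.  */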
11104 if (TREE_CODE (arg0) == BIT_AND_EXPR
11105 && TREE_CODE (arg1) == INTEGER_CST
11106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11108 double_int c1, c2, c3, msk;
11109 int width = TYPE_PRECISION (type), w;
11110 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11111 c2 = tree_to_double_int (arg1);
11113 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11114 if ((c1 & c2) == c1)
11115 return omit_one_operand_loc (loc, type, arg1,
11116 TREE_OPERAND (arg0, 0));
11118 msk = double_int::mask (width);
11120 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11121 if (msk.and_not (c1 | c2).is_zero ())
11122 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11123 TREE_OPERAND (arg0, 0), arg1);
11125 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11126 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11127 mode which allows further optimizations. */
11128 c1 &= msk;
11129 c2 &= msk;
11130 c3 = c1.and_not (c2);
11131 for (w = BITS_PER_UNIT;
11132 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11133 w <<= 1)
11135 unsigned HOST_WIDE_INT mask
11136 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11137 if (((c1.low | c2.low) & mask) == mask
11138 && (c1.low & ~mask) == 0 && c1.high == 0)
11140 c3 = double_int::from_uhwi (mask);
11141 break;
11144 if (c3 != c1)
11145 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11146 fold_build2_loc (loc, BIT_AND_EXPR, type,
11147 TREE_OPERAND (arg0, 0),
11148 double_int_to_tree (type,
11149 c3)),
11150 arg1);
11153 /* (X & Y) | Y is (X, Y). */
11154 if (TREE_CODE (arg0) == BIT_AND_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11156 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11157 /* (X & Y) | X is (Y, X). */
11158 if (TREE_CODE (arg0) == BIT_AND_EXPR
11159 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11160 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11161 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11162 /* X | (X & Y) is (Y, X). */
11163 if (TREE_CODE (arg1) == BIT_AND_EXPR
11164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11165 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11166 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11167 /* X | (Y & X) is (Y, X). */
11168 if (TREE_CODE (arg1) == BIT_AND_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11170 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11171 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11173 /* (X & ~Y) | (~X & Y) is X ^ Y */
11174 if (TREE_CODE (arg0) == BIT_AND_EXPR
11175 && TREE_CODE (arg1) == BIT_AND_EXPR)
11177 tree a0, a1, l0, l1, n0, n1;
11179 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11180 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11182 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11183 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11185 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11186 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11188 if ((operand_equal_p (n0, a0, 0)
11189 && operand_equal_p (n1, a1, 0))
11190 || (operand_equal_p (n0, a1, 0)
11191 && operand_equal_p (n1, a0, 0)))
11192 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11195 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11196 if (t1 != NULL_TREE)
11197 return t1;
11199 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11201 This results in more efficient code for machines without a NAND
11202 instruction. Combine will canonicalize to the first form
11203 which will allow use of NAND instructions provided by the
11204 backend if they exist. */
11205 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11206 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11208 return
11209 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11210 build2 (BIT_AND_EXPR, type,
11211 fold_convert_loc (loc, type,
11212 TREE_OPERAND (arg0, 0)),
11213 fold_convert_loc (loc, type,
11214 TREE_OPERAND (arg1, 0))));
11217 /* See if this can be simplified into a rotate first. If that
11218 is unsuccessful continue in the association code. */
11219 goto bit_rotate;
11221 case BIT_XOR_EXPR:
11222 if (integer_zerop (arg1))
11223 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11224 if (integer_all_onesp (arg1))
11225 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11226 if (operand_equal_p (arg0, arg1, 0))
11227 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11229 /* ~X ^ X is -1. */
11230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11231 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11233 t1 = build_zero_cst (type);
11234 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11235 return omit_one_operand_loc (loc, type, t1, arg1);
11238 /* X ^ ~X is -1. */
11239 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11242 t1 = build_zero_cst (type);
11243 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11244 return omit_one_operand_loc (loc, type, t1, arg0);
11247 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11248 with a constant, and the two constants have no bits in common,
11249 we should treat this as a BIT_IOR_EXPR since this may produce more
11250 simplifications. */
11251 if (TREE_CODE (arg0) == BIT_AND_EXPR
11252 && TREE_CODE (arg1) == BIT_AND_EXPR
11253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11254 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11255 && integer_zerop (const_binop (BIT_AND_EXPR,
11256 TREE_OPERAND (arg0, 1),
11257 TREE_OPERAND (arg1, 1))))
11259 code = BIT_IOR_EXPR;
11260 goto bit_ior;
11263 /* (X | Y) ^ X -> Y & ~X */
11264 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11265 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11267 tree t2 = TREE_OPERAND (arg0, 1);
11268 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11269 arg1);
11270 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11271 fold_convert_loc (loc, type, t2),
11272 fold_convert_loc (loc, type, t1));
11273 return t1;
11276 /* (Y | X) ^ X -> Y & ~X */
11277 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11278 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11280 tree t2 = TREE_OPERAND (arg0, 0);
11281 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11282 arg1);
11283 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11284 fold_convert_loc (loc, type, t2),
11285 fold_convert_loc (loc, type, t1));
11286 return t1;
11289 /* X ^ (X | Y) -> Y & ~X */
11290 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11291 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11293 tree t2 = TREE_OPERAND (arg1, 1);
11294 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11295 arg0);
11296 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11297 fold_convert_loc (loc, type, t2),
11298 fold_convert_loc (loc, type, t1));
11299 return t1;
11302 /* X ^ (Y | X) -> Y & ~X */
11303 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11304 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11306 tree t2 = TREE_OPERAND (arg1, 0);
11307 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11308 arg0);
11309 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11310 fold_convert_loc (loc, type, t2),
11311 fold_convert_loc (loc, type, t1));
11312 return t1;
11315 /* Convert ~X ^ ~Y to X ^ Y. */
11316 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11317 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11318 return fold_build2_loc (loc, code, type,
11319 fold_convert_loc (loc, type,
11320 TREE_OPERAND (arg0, 0)),
11321 fold_convert_loc (loc, type,
11322 TREE_OPERAND (arg1, 0)));
11324 /* Convert ~X ^ C to X ^ ~C. */
11325 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11326 && TREE_CODE (arg1) == INTEGER_CST)
11327 return fold_build2_loc (loc, code, type,
11328 fold_convert_loc (loc, type,
11329 TREE_OPERAND (arg0, 0)),
11330 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11332 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11333 if (TREE_CODE (arg0) == BIT_AND_EXPR
11334 && integer_onep (TREE_OPERAND (arg0, 1))
11335 && integer_onep (arg1))
11336 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11337 build_zero_cst (TREE_TYPE (arg0)));
11339 /* Fold (X & Y) ^ Y as ~X & Y. */
11340 if (TREE_CODE (arg0) == BIT_AND_EXPR
11341 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11343 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11344 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11345 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11346 fold_convert_loc (loc, type, arg1));
11348 /* Fold (X & Y) ^ X as ~Y & X. */
11349 if (TREE_CODE (arg0) == BIT_AND_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11351 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11353 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11354 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11355 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11356 fold_convert_loc (loc, type, arg1));
11358 /* Fold X ^ (X & Y) as X & ~Y. */
11359 if (TREE_CODE (arg1) == BIT_AND_EXPR
11360 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11362 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11363 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11364 fold_convert_loc (loc, type, arg0),
11365 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11367 /* Fold X ^ (Y & X) as ~Y & X. */
11368 if (TREE_CODE (arg1) == BIT_AND_EXPR
11369 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11370 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11372 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11373 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11374 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11375 fold_convert_loc (loc, type, arg0));
11378 /* See if this can be simplified into a rotate first. If that
11379 is unsuccessful continue in the association code. */
11380 goto bit_rotate;
11382 case BIT_AND_EXPR:
11383 if (integer_all_onesp (arg1))
11384 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11385 if (integer_zerop (arg1))
11386 return omit_one_operand_loc (loc, type, arg1, arg0);
11387 if (operand_equal_p (arg0, arg1, 0))
11388 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11390 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11391 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11392 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11393 || (TREE_CODE (arg0) == EQ_EXPR
11394 && integer_zerop (TREE_OPERAND (arg0, 1))))
11395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11396 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11398 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11399 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11400 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11401 || (TREE_CODE (arg1) == EQ_EXPR
11402 && integer_zerop (TREE_OPERAND (arg1, 1))))
11403 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11404 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11406 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11407 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11408 && TREE_CODE (arg1) == INTEGER_CST
11409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11411 tree tmp1 = fold_convert_loc (loc, type, arg1);
11412 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11413 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11414 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11415 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11416 return
11417 fold_convert_loc (loc, type,
11418 fold_build2_loc (loc, BIT_IOR_EXPR,
11419 type, tmp2, tmp3));
11422 /* (X | Y) & Y is (X, Y). */
11423 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11424 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11425 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11426 /* (X | Y) & X is (Y, X). */
11427 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11429 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11430 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11431 /* X & (X | Y) is (Y, X). */
11432 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11434 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11435 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11436 /* X & (Y | X) is (Y, X). */
11437 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11439 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11440 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11442 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11443 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11444 && integer_onep (TREE_OPERAND (arg0, 1))
11445 && integer_onep (arg1))
11447 tree tem2;
11448 tem = TREE_OPERAND (arg0, 0);
11449 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11450 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11451 tem, tem2);
11452 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11453 build_zero_cst (TREE_TYPE (tem)));
11455 /* Fold ~X & 1 as (X & 1) == 0. */
11456 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11457 && integer_onep (arg1))
11459 tree tem2;
11460 tem = TREE_OPERAND (arg0, 0);
11461 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11462 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11463 tem, tem2);
11464 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11465 build_zero_cst (TREE_TYPE (tem)));
11467 /* Fold !X & 1 as X == 0. */
11468 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11469 && integer_onep (arg1))
11471 tem = TREE_OPERAND (arg0, 0);
11472 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11473 build_zero_cst (TREE_TYPE (tem)));
11476 /* Fold (X ^ Y) & Y as ~X & Y. */
11477 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11478 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11480 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11481 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11482 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11483 fold_convert_loc (loc, type, arg1));
11485 /* Fold (X ^ Y) & X as ~Y & X. */
11486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11487 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11488 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11490 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11491 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11492 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11493 fold_convert_loc (loc, type, arg1));
11495 /* Fold X & (X ^ Y) as X & ~Y. */
11496 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11499 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11500 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11501 fold_convert_loc (loc, type, arg0),
11502 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11504 /* Fold X & (Y ^ X) as ~Y & X. */
11505 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11507 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11509 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11510 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11511 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11512 fold_convert_loc (loc, type, arg0));
11515 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11516 multiple of 1 << CST. */
11517 if (TREE_CODE (arg1) == INTEGER_CST)
11519 double_int cst1 = tree_to_double_int (arg1);
11520 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11521 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11522 if ((cst1 & ncst1) == ncst1
11523 && multiple_of_p (type, arg0,
11524 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11525 return fold_convert_loc (loc, type, arg0);
11528 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11529 bits from CST2. */
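      /* E.g. (X * 4) & 5 folds to (X * 4) & 4, and (X * 4) & 3 folds to 0,
	 because X * 4 always has its two low bits clear.  */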
11530 if (TREE_CODE (arg1) == INTEGER_CST
11531 && TREE_CODE (arg0) == MULT_EXPR
11532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11534 int arg1tz
11535 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11536 if (arg1tz > 0)
11538 double_int arg1mask, masked;
11539 arg1mask = ~double_int::mask (arg1tz);
11540 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11541 TYPE_UNSIGNED (type));
11542 masked = arg1mask & tree_to_double_int (arg1);
11543 if (masked.is_zero ())
11544 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11545 arg0, arg1);
11546 else if (masked != tree_to_double_int (arg1))
11547 return fold_build2_loc (loc, code, type, op0,
11548 double_int_to_tree (type, masked));
11552 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11553 ((A & N) + B) & M -> (A + B) & M
11554 Similarly if (N & M) == 0,
11555 ((A | N) + B) & M -> (A + B) & M
11556 and for - instead of + (or unary - instead of +)
11557 and/or ^ instead of |.
11558 If B is constant and (B & M) == 0, fold into A & M. */
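      /* E.g. with M == 0xff, ((A & 0x1ff) + B) & 0xff folds to
	 (A + B) & 0xff because (0x1ff & 0xff) == 0xff, and
	 ((A | 0x100) + B) & 0xff folds the same way because
	 (0x100 & 0xff) == 0.  */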
11559 if (host_integerp (arg1, 1))
11561 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11562 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11563 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11564 && (TREE_CODE (arg0) == PLUS_EXPR
11565 || TREE_CODE (arg0) == MINUS_EXPR
11566 || TREE_CODE (arg0) == NEGATE_EXPR)
11567 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11568 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11570 tree pmop[2];
11571 int which = 0;
11572 unsigned HOST_WIDE_INT cst0;
11574 /* Now we know that arg0 is (C + D) or (C - D) or
11575 -C and arg1 (M) == (1LL << cst) - 1.
11576 Store C into PMOP[0] and D into PMOP[1]. */
11577 pmop[0] = TREE_OPERAND (arg0, 0);
11578 pmop[1] = NULL;
11579 if (TREE_CODE (arg0) != NEGATE_EXPR)
11581 pmop[1] = TREE_OPERAND (arg0, 1);
11582 which = 1;
11585 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11586 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11587 & cst1) != cst1)
11588 which = -1;
11590 for (; which >= 0; which--)
11591 switch (TREE_CODE (pmop[which]))
11593 case BIT_AND_EXPR:
11594 case BIT_IOR_EXPR:
11595 case BIT_XOR_EXPR:
11596 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11597 != INTEGER_CST)
11598 break;
11599 /* tree_low_cst not used, because we don't care about
11600 the upper bits. */
11601 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11602 cst0 &= cst1;
11603 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11605 if (cst0 != cst1)
11606 break;
11608 else if (cst0 != 0)
11609 break;
11610 /* If C or D is of the form (A & N) where
11611 (N & M) == M, or of the form (A | N) or
11612 (A ^ N) where (N & M) == 0, replace it with A. */
11613 pmop[which] = TREE_OPERAND (pmop[which], 0);
11614 break;
11615 case INTEGER_CST:
11616 /* If C or D is an N where (N & M) == 0, it can be
11617 omitted (assumed 0). */
11618 if ((TREE_CODE (arg0) == PLUS_EXPR
11619 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11620 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11621 pmop[which] = NULL;
11622 break;
11623 default:
11624 break;
11627 /* Only build anything new if we optimized one or both arguments
11628 above. */
11629 if (pmop[0] != TREE_OPERAND (arg0, 0)
11630 || (TREE_CODE (arg0) != NEGATE_EXPR
11631 && pmop[1] != TREE_OPERAND (arg0, 1)))
11633 tree utype = TREE_TYPE (arg0);
11634 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11636 /* Perform the operations in a type that has defined
11637 overflow behavior. */
11638 utype = unsigned_type_for (TREE_TYPE (arg0));
11639 if (pmop[0] != NULL)
11640 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11641 if (pmop[1] != NULL)
11642 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11645 if (TREE_CODE (arg0) == NEGATE_EXPR)
11646 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11647 else if (TREE_CODE (arg0) == PLUS_EXPR)
11649 if (pmop[0] != NULL && pmop[1] != NULL)
11650 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11651 pmop[0], pmop[1]);
11652 else if (pmop[0] != NULL)
11653 tem = pmop[0];
11654 else if (pmop[1] != NULL)
11655 tem = pmop[1];
11656 else
11657 return build_int_cst (type, 0);
11659 else if (pmop[0] == NULL)
11660 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11661 else
11662 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11663 pmop[0], pmop[1]);
11664 /* TEM is now the new binary +, - or unary - replacement. */
11665 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11666 fold_convert_loc (loc, utype, arg1));
11667 return fold_convert_loc (loc, type, tem);
11672 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11673 if (t1 != NULL_TREE)
11674 return t1;
11675 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11676 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11677 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11679 unsigned int prec
11680 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11682 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11683 && (~TREE_INT_CST_LOW (arg1)
11684 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11685 return
11686 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11689 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11691 This results in more efficient code for machines without a NOR
11692 instruction. Combine will canonicalize to the first form
11693 which will allow use of NOR instructions provided by the
11694 backend if they exist. */
11695 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11696 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11698 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11699 build2 (BIT_IOR_EXPR, type,
11700 fold_convert_loc (loc, type,
11701 TREE_OPERAND (arg0, 0)),
11702 fold_convert_loc (loc, type,
11703 TREE_OPERAND (arg1, 0))));
11706 /* If arg0 is derived from the address of an object or function, we may
11707 be able to fold this expression using the object or function's
11708 alignment. */
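      /* E.g. if ARG0 is the address of an object known to be 8-byte
	 aligned (modulus 8, residue 0), then ARG0 & 7 folds to 0.  */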
11709 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11711 unsigned HOST_WIDE_INT modulus, residue;
11712 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11714 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11715 integer_onep (arg1));
11717 /* This works because modulus is a power of 2. If this weren't the
11718 case, we'd have to replace it by its greatest power-of-2
11719 divisor: modulus & -modulus. */
11720 if (low < modulus)
11721 return build_int_cst (type, residue & low);
11724 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11725 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11726 if the new mask might be further optimized. */
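      /* E.g. for a 32-bit unsigned X, (X << 8) & 0xffffff00 folds to
	 X << 8: the shift already clears the low eight bits, so the
	 mask widens to ~0 and the BIT_AND is dropped.  */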
11727 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11728 || TREE_CODE (arg0) == RSHIFT_EXPR)
11729 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11730 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11731 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11732 < TYPE_PRECISION (TREE_TYPE (arg0))
11733 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11734 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11736 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11737 unsigned HOST_WIDE_INT mask
11738 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11739 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11740 tree shift_type = TREE_TYPE (arg0);
11742 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11743 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11744 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11745 && TYPE_PRECISION (TREE_TYPE (arg0))
11746 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11748 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11749 tree arg00 = TREE_OPERAND (arg0, 0);
11750 /* See if more bits can be proven as zero because of
11751 zero extension. */
11752 if (TREE_CODE (arg00) == NOP_EXPR
11753 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11755 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11756 if (TYPE_PRECISION (inner_type)
11757 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11758 && TYPE_PRECISION (inner_type) < prec)
11760 prec = TYPE_PRECISION (inner_type);
11761 /* See if we can shorten the right shift. */
11762 if (shiftc < prec)
11763 shift_type = inner_type;
11766 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11767 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11768 zerobits <<= prec - shiftc;
11769 /* For an arithmetic shift, if the sign bit could be set, zerobits
11770 can actually contain sign bits, so no transformation is
11771 possible, unless MASK masks them all away. In that
11772 case the shift needs to be converted into logical shift. */
11773 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11774 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11776 if ((mask & zerobits) == 0)
11777 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11778 else
11779 zerobits = 0;
11783 /* ((X << 16) & 0xff00) is (X, 0). */
11784 if ((mask & zerobits) == mask)
11785 return omit_one_operand_loc (loc, type,
11786 build_int_cst (type, 0), arg0);
11788 newmask = mask | zerobits;
11789 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11791 unsigned int prec;
11793 /* Only do the transformation if NEWMASK is some integer
11794 mode's mask. */
11795 for (prec = BITS_PER_UNIT;
11796 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11797 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11798 break;
11799 if (prec < HOST_BITS_PER_WIDE_INT
11800 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11802 tree newmaskt;
11804 if (shift_type != TREE_TYPE (arg0))
11806 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11807 fold_convert_loc (loc, shift_type,
11808 TREE_OPERAND (arg0, 0)),
11809 TREE_OPERAND (arg0, 1));
11810 tem = fold_convert_loc (loc, type, tem);
11812 else
11813 tem = op0;
11814 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11815 if (!tree_int_cst_equal (newmaskt, arg1))
11816 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11821 goto associate;
11823 case RDIV_EXPR:
11824 /* Don't touch a floating-point divide by zero unless the mode
11825 of the constant can represent infinity. */
11826 if (TREE_CODE (arg1) == REAL_CST
11827 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11828 && real_zerop (arg1))
11829 return NULL_TREE;
11831 /* Optimize A / A to 1.0 if we don't care about
11832 NaNs or Infinities. Skip the transformation
11833 for non-real operands. */
11834 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11835 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11836 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11837 && operand_equal_p (arg0, arg1, 0))
11839 tree r = build_real (TREE_TYPE (arg0), dconst1);
11841 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11844 /* The complex version of the above A / A optimization. */
11845 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11846 && operand_equal_p (arg0, arg1, 0))
11848 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11849 if (! HONOR_NANS (TYPE_MODE (elem_type))
11850 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11852 tree r = build_real (elem_type, dconst1);
11853 /* omit_two_operands will call fold_convert for us. */
11854 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11858 /* (-A) / (-B) -> A / B */
11859 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11860 return fold_build2_loc (loc, RDIV_EXPR, type,
11861 TREE_OPERAND (arg0, 0),
11862 negate_expr (arg1));
11863 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11864 return fold_build2_loc (loc, RDIV_EXPR, type,
11865 negate_expr (arg0),
11866 TREE_OPERAND (arg1, 0));
11868 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11869 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11870 && real_onep (arg1))
11871 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11873 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11875 && real_minus_onep (arg1))
11876 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11877 negate_expr (arg0)));
11879 /* If ARG1 is a constant, we can convert this to a multiply by the
11880 reciprocal. This does not have the same rounding properties,
11881 so only do this if -freciprocal-math. We can actually
11882 always safely do it if ARG1 is a power of two, but it's hard to
11883 tell if it is or not in a portable manner. */
11884 if (optimize
11885 && (TREE_CODE (arg1) == REAL_CST
11886 || (TREE_CODE (arg1) == COMPLEX_CST
11887 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11888 || (TREE_CODE (arg1) == VECTOR_CST
11889 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11891 if (flag_reciprocal_math
11892 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11893 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11894 /* Find the reciprocal if optimizing and the result is exact.
11895 TODO: Complex reciprocal not implemented. */
11896 if (TREE_CODE (arg1) != COMPLEX_CST)
11898 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11900 if (inverse)
11901 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11904 /* Convert A/B/C to A/(B*C). */
11905 if (flag_reciprocal_math
11906 && TREE_CODE (arg0) == RDIV_EXPR)
11907 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11908 fold_build2_loc (loc, MULT_EXPR, type,
11909 TREE_OPERAND (arg0, 1), arg1));
11911 /* Convert A/(B/C) to (A/B)*C. */
11912 if (flag_reciprocal_math
11913 && TREE_CODE (arg1) == RDIV_EXPR)
11914 return fold_build2_loc (loc, MULT_EXPR, type,
11915 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11916 TREE_OPERAND (arg1, 0)),
11917 TREE_OPERAND (arg1, 1));
11919 /* Convert C1/(X*C2) into (C1/C2)/X. */
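      /* E.g. 6.0 / (X * 2.0) folds to 3.0 / X under -freciprocal-math.  */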
11920 if (flag_reciprocal_math
11921 && TREE_CODE (arg1) == MULT_EXPR
11922 && TREE_CODE (arg0) == REAL_CST
11923 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11925 tree tem = const_binop (RDIV_EXPR, arg0,
11926 TREE_OPERAND (arg1, 1));
11927 if (tem)
11928 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11929 TREE_OPERAND (arg1, 0));
11932 if (flag_unsafe_math_optimizations)
11934 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11935 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11937 /* Optimize sin(x)/cos(x) as tan(x). */
11938 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11939 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11940 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11941 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11942 CALL_EXPR_ARG (arg1, 0), 0))
11944 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11946 if (tanfn != NULL_TREE)
11947 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11950 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11951 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11952 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11953 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11954 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11955 CALL_EXPR_ARG (arg1, 0), 0))
11957 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11959 if (tanfn != NULL_TREE)
11961 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11962 CALL_EXPR_ARG (arg0, 0));
11963 return fold_build2_loc (loc, RDIV_EXPR, type,
11964 build_real (type, dconst1), tmp);
11968 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11969 NaNs or Infinities. */
11970 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11971 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11972 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11974 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11975 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11977 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11978 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11979 && operand_equal_p (arg00, arg01, 0))
11981 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11983 if (cosfn != NULL_TREE)
11984 return build_call_expr_loc (loc, cosfn, 1, arg00);
11988 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11989 NaNs or Infinities. */
11990 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11991 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11992 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11994 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11995 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11997 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11998 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11999 && operand_equal_p (arg00, arg01, 0))
12001 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12003 if (cosfn != NULL_TREE)
12005 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12006 return fold_build2_loc (loc, RDIV_EXPR, type,
12007 build_real (type, dconst1),
12008 tmp);
12013 /* Optimize pow(x,c)/x as pow(x,c-1). */
12014 if (fcode0 == BUILT_IN_POW
12015 || fcode0 == BUILT_IN_POWF
12016 || fcode0 == BUILT_IN_POWL)
12018 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12019 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12020 if (TREE_CODE (arg01) == REAL_CST
12021 && !TREE_OVERFLOW (arg01)
12022 && operand_equal_p (arg1, arg00, 0))
12024 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12025 REAL_VALUE_TYPE c;
12026 tree arg;
12028 c = TREE_REAL_CST (arg01);
12029 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12030 arg = build_real (type, c);
12031 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12035 /* Optimize a/root(b/c) into a*root(c/b). */
12036 if (BUILTIN_ROOT_P (fcode1))
12038 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12040 if (TREE_CODE (rootarg) == RDIV_EXPR)
12042 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12043 tree b = TREE_OPERAND (rootarg, 0);
12044 tree c = TREE_OPERAND (rootarg, 1);
12046 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12048 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12049 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12053 /* Optimize x/expN(y) into x*expN(-y). */
12054 if (BUILTIN_EXPONENT_P (fcode1))
12056 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12057 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12058 arg1 = build_call_expr_loc (loc,
12059 expfn, 1,
12060 fold_convert_loc (loc, type, arg));
12061 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12064 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12065 if (fcode1 == BUILT_IN_POW
12066 || fcode1 == BUILT_IN_POWF
12067 || fcode1 == BUILT_IN_POWL)
12069 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12070 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12071 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12072 tree neg11 = fold_convert_loc (loc, type,
12073 negate_expr (arg11));
12074 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12075 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12078 return NULL_TREE;
12080 case TRUNC_DIV_EXPR:
12081 /* Optimize (X & (-A)) / A where A is a power of 2,
12082 to X >> log2(A). */
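      /* E.g. (X & -8) / 8 folds to X >> 3: the BIT_AND clears the three
	 low bits, so the division is exact and the arithmetic shift gives
	 the same result even for negative X.  */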
12083 if (TREE_CODE (arg0) == BIT_AND_EXPR
12084 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12085 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12087 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12088 arg1, TREE_OPERAND (arg0, 1));
12089 if (sum && integer_zerop (sum)) {
12090 unsigned long pow2;
12092 if (TREE_INT_CST_LOW (arg1))
12093 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12094 else
12095 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12096 + HOST_BITS_PER_WIDE_INT;
12098 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12099 TREE_OPERAND (arg0, 0),
12100 build_int_cst (integer_type_node, pow2));
12104 /* Fall through */
12106 case FLOOR_DIV_EXPR:
12107 /* Simplify A / (B << N) where A and B are positive and B is
12108 a power of 2, to A >> (N + log2(B)). */
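      /* E.g. for unsigned A, A / (4 << N) folds to A >> (N + 2).  */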
12109 strict_overflow_p = false;
12110 if (TREE_CODE (arg1) == LSHIFT_EXPR
12111 && (TYPE_UNSIGNED (type)
12112 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12114 tree sval = TREE_OPERAND (arg1, 0);
12115 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12117 tree sh_cnt = TREE_OPERAND (arg1, 1);
12118 unsigned long pow2;
12120 if (TREE_INT_CST_LOW (sval))
12121 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12122 else
12123 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12124 + HOST_BITS_PER_WIDE_INT;
12126 if (strict_overflow_p)
12127 fold_overflow_warning (("assuming signed overflow does not "
12128 "occur when simplifying A / (B << N)"),
12129 WARN_STRICT_OVERFLOW_MISC);
12131 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12132 sh_cnt,
12133 build_int_cst (TREE_TYPE (sh_cnt),
12134 pow2));
12135 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12136 fold_convert_loc (loc, type, arg0), sh_cnt);
12140 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12141 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12142 if (INTEGRAL_TYPE_P (type)
12143 && TYPE_UNSIGNED (type)
12144 && code == FLOOR_DIV_EXPR)
12145 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12147 /* Fall through */
12149 case ROUND_DIV_EXPR:
12150 case CEIL_DIV_EXPR:
12151 case EXACT_DIV_EXPR:
12152 if (integer_onep (arg1))
12153 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12154 if (integer_zerop (arg1))
12155 return NULL_TREE;
12156 /* X / -1 is -X. */
12157 if (!TYPE_UNSIGNED (type)
12158 && TREE_CODE (arg1) == INTEGER_CST
12159 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12160 && TREE_INT_CST_HIGH (arg1) == -1)
12161 return fold_convert_loc (loc, type, negate_expr (arg0));
12163 /* Convert -A / -B to A / B when the type is signed and overflow is
12164 undefined. */
12165 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12166 && TREE_CODE (arg0) == NEGATE_EXPR
12167 && negate_expr_p (arg1))
12169 if (INTEGRAL_TYPE_P (type))
12170 fold_overflow_warning (("assuming signed overflow does not occur "
12171 "when distributing negation across "
12172 "division"),
12173 WARN_STRICT_OVERFLOW_MISC);
12174 return fold_build2_loc (loc, code, type,
12175 fold_convert_loc (loc, type,
12176 TREE_OPERAND (arg0, 0)),
12177 fold_convert_loc (loc, type,
12178 negate_expr (arg1)));
12180 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12181 && TREE_CODE (arg1) == NEGATE_EXPR
12182 && negate_expr_p (arg0))
12184 if (INTEGRAL_TYPE_P (type))
12185 fold_overflow_warning (("assuming signed overflow does not occur "
12186 "when distributing negation across "
12187 "division"),
12188 WARN_STRICT_OVERFLOW_MISC);
12189 return fold_build2_loc (loc, code, type,
12190 fold_convert_loc (loc, type,
12191 negate_expr (arg0)),
12192 fold_convert_loc (loc, type,
12193 TREE_OPERAND (arg1, 0)));
12196 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12197 operation, EXACT_DIV_EXPR.
12199 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12200 At one time others generated faster code; it's not clear if they do
12201 after the last round of changes to the DIV code in expmed.c. */
12202 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12203 && multiple_of_p (type, arg0, arg1))
12204 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12206 strict_overflow_p = false;
12207 if (TREE_CODE (arg1) == INTEGER_CST
12208 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12209 &strict_overflow_p)))
12211 if (strict_overflow_p)
12212 fold_overflow_warning (("assuming signed overflow does not occur "
12213 "when simplifying division"),
12214 WARN_STRICT_OVERFLOW_MISC);
12215 return fold_convert_loc (loc, type, tem);
12218 return NULL_TREE;
12220 case CEIL_MOD_EXPR:
12221 case FLOOR_MOD_EXPR:
12222 case ROUND_MOD_EXPR:
12223 case TRUNC_MOD_EXPR:
12224 /* X % 1 is always zero, but be sure to preserve any side
12225 effects in X. */
12226 if (integer_onep (arg1))
12227 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12229 /* For X % 0, return X % 0 unchanged so that we can get the
12230 proper warnings and errors. */
12231 if (integer_zerop (arg1))
12232 return NULL_TREE;
12234 /* 0 % X is always zero, but be sure to preserve any side
12235 effects in X. Place this after checking for X == 0. */
12236 if (integer_zerop (arg0))
12237 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12239 /* X % -1 is zero. */
12240 if (!TYPE_UNSIGNED (type)
12241 && TREE_CODE (arg1) == INTEGER_CST
12242 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12243 && TREE_INT_CST_HIGH (arg1) == -1)
12244 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12246 /* X % -C is the same as X % C. */
12247 if (code == TRUNC_MOD_EXPR
12248 && !TYPE_UNSIGNED (type)
12249 && TREE_CODE (arg1) == INTEGER_CST
12250 && !TREE_OVERFLOW (arg1)
12251 && TREE_INT_CST_HIGH (arg1) < 0
12252 && !TYPE_OVERFLOW_TRAPS (type)
12253 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12254 && !sign_bit_p (arg1, arg1))
12255 return fold_build2_loc (loc, code, type,
12256 fold_convert_loc (loc, type, arg0),
12257 fold_convert_loc (loc, type,
12258 negate_expr (arg1)));
12260 /* X % -Y is the same as X % Y. */
12261 if (code == TRUNC_MOD_EXPR
12262 && !TYPE_UNSIGNED (type)
12263 && TREE_CODE (arg1) == NEGATE_EXPR
12264 && !TYPE_OVERFLOW_TRAPS (type))
12265 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12266 fold_convert_loc (loc, type,
12267 TREE_OPERAND (arg1, 0)));
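/* Worked example (illustrative): truncating division satisfies
   a == (a / b) * b + a % b, so negating b flips the quotient but not
   the remainder: (-7) % 3 == -1 and (-7) % -3 == -1.  That is why
   X % -C and X % -Y can drop the negation, provided it cannot trap.  */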
12269 strict_overflow_p = false;
12270 if (TREE_CODE (arg1) == INTEGER_CST
12271 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12272 &strict_overflow_p)))
12274 if (strict_overflow_p)
12275 fold_overflow_warning (("assuming signed overflow does not occur "
12276 "when simplifying modulus"),
12277 WARN_STRICT_OVERFLOW_MISC);
12278 return fold_convert_loc (loc, type, tem);
12281 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12282 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12283 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12284 && (TYPE_UNSIGNED (type)
12285 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12287 tree c = arg1;
12288 /* Also optimize A % (C << N) where C is a power of 2,
12289 to A & ((C << N) - 1). */
12290 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12291 c = TREE_OPERAND (arg1, 0);
12293 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12295 tree mask
12296 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12297 build_int_cst (TREE_TYPE (arg1), 1));
12298 if (strict_overflow_p)
12299 fold_overflow_warning (("assuming signed overflow does not "
12300 "occur when simplifying "
12301 "X % (power of two)"),
12302 WARN_STRICT_OVERFLOW_MISC);
12303 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12304 fold_convert_loc (loc, type, arg0),
12305 fold_convert_loc (loc, type, mask));
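/* Worked example (illustrative): for unsigned X, X % 8 folds to
   X & 7, since 8 == 1 << 3 and the remainder modulo a power of two is
   exactly the low-order bits.  For signed X this is only valid when X
   is known non-negative, which the check above enforces.  */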
12309 return NULL_TREE;
12311 case LROTATE_EXPR:
12312 case RROTATE_EXPR:
12313 if (integer_all_onesp (arg0))
12314 return omit_one_operand_loc (loc, type, arg0, arg1);
12315 goto shift;
12317 case RSHIFT_EXPR:
12318 /* Optimize -1 >> x for arithmetic right shifts. */
12319 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12320 && tree_expr_nonnegative_p (arg1))
12321 return omit_one_operand_loc (loc, type, arg0, arg1);
12322 /* ... fall through ... */
12324 case LSHIFT_EXPR:
12325 shift:
12326 if (integer_zerop (arg1))
12327 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12328 if (integer_zerop (arg0))
12329 return omit_one_operand_loc (loc, type, arg0, arg1);
12331 /* Since a negative shift count is not well-defined,
12332 don't try to compute it in the compiler. */
12333 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12334 return NULL_TREE;
12336 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12337 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12338 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12339 && host_integerp (TREE_OPERAND (arg0, 1), false)
12340 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12342 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12343 + TREE_INT_CST_LOW (arg1));
12345 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12346 being well defined. */
12347 if (low >= TYPE_PRECISION (type))
12349 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12350 low = low % TYPE_PRECISION (type);
12351 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12352 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12353 TREE_OPERAND (arg0, 0));
12354 else
12355 low = TYPE_PRECISION (type) - 1;
12358 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12359 build_int_cst (type, low));
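/* Worked examples (illustrative, 32-bit type): (x << 3) << 5 combines
   to x << 8 because 3 + 5 < 32; (x >> 20) >> 20 on unsigned x becomes
   0 because every bit is shifted out; and a rotate count of 40 reduces
   to 40 % 32 == 8 because rotates are periodic in the precision.  */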
12362 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12363 into x & ((unsigned)-1 >> c) for unsigned types. */
12364 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12365 || (TYPE_UNSIGNED (type)
12366 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12367 && host_integerp (arg1, false)
12368 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12369 && host_integerp (TREE_OPERAND (arg0, 1), false)
12370 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12372 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12373 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12374 tree lshift;
12375 tree arg00;
12377 if (low0 == low1)
12379 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12381 lshift = build_int_cst (type, -1);
12382 lshift = int_const_binop (code, lshift, arg1);
12384 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
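/* Worked example (illustrative): for 32-bit unsigned x,
   (x >> 4) << 4 clears the low four bits, i.e. x & 0xfffffff0, which
   is the x & (-1 << 4) form built above.  */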
12388 /* Rewrite an LROTATE_EXPR by a constant into an
12389 RROTATE_EXPR by a new constant. */
12390 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12392 tree tem = build_int_cst (TREE_TYPE (arg1),
12393 TYPE_PRECISION (type));
12394 tem = const_binop (MINUS_EXPR, tem, arg1);
12395 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
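/* Worked example (illustrative): rotating a 32-bit value left by 8 is
   the same as rotating it right by 32 - 8 == 24, so later passes only
   ever have to handle one rotate direction.  */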
12398 /* If we have a rotate of a bit operation with the rotate count and
12399 the second operand of the bit operation both constant,
12400 permute the two operations. */
12401 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12402 && (TREE_CODE (arg0) == BIT_AND_EXPR
12403 || TREE_CODE (arg0) == BIT_IOR_EXPR
12404 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12406 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12407 fold_build2_loc (loc, code, type,
12408 TREE_OPERAND (arg0, 0), arg1),
12409 fold_build2_loc (loc, code, type,
12410 TREE_OPERAND (arg0, 1), arg1));
12412 /* Two consecutive rotates adding up to the precision of the
12413 type can be ignored. */
12414 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12415 && TREE_CODE (arg0) == RROTATE_EXPR
12416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12417 && TREE_INT_CST_HIGH (arg1) == 0
12418 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12419 && ((TREE_INT_CST_LOW (arg1)
12420 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12421 == (unsigned int) TYPE_PRECISION (type)))
12422 return TREE_OPERAND (arg0, 0);
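/* Worked example (illustrative): ror (ror (x, 8), 24) on a 32-bit x
   rotates by 8 + 24 == 32 bits in total, which is the identity, so
   only the innermost operand survives.  */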
12424 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12425 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12426 if the latter can be further optimized. */
12427 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12428 && TREE_CODE (arg0) == BIT_AND_EXPR
12429 && TREE_CODE (arg1) == INTEGER_CST
12430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12432 tree mask = fold_build2_loc (loc, code, type,
12433 fold_convert_loc (loc, type,
12434 TREE_OPERAND (arg0, 1)),
12435 arg1);
12436 tree shift = fold_build2_loc (loc, code, type,
12437 fold_convert_loc (loc, type,
12438 TREE_OPERAND (arg0, 0)),
12439 arg1);
12440 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12441 if (tem)
12442 return tem;
12445 return NULL_TREE;
12447 case MIN_EXPR:
12448 if (operand_equal_p (arg0, arg1, 0))
12449 return omit_one_operand_loc (loc, type, arg0, arg1);
12450 if (INTEGRAL_TYPE_P (type)
12451 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12452 return omit_one_operand_loc (loc, type, arg1, arg0);
12453 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12454 if (tem)
12455 return tem;
12456 goto associate;
12458 case MAX_EXPR:
12459 if (operand_equal_p (arg0, arg1, 0))
12460 return omit_one_operand_loc (loc, type, arg0, arg1);
12461 if (INTEGRAL_TYPE_P (type)
12462 && TYPE_MAX_VALUE (type)
12463 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12464 return omit_one_operand_loc (loc, type, arg1, arg0);
12465 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12466 if (tem)
12467 return tem;
12468 goto associate;
12470 case TRUTH_ANDIF_EXPR:
12471 /* Note that the operands of this must be ints
12472 and their values must be 0 or 1.
12473 ("true" is a fixed value perhaps depending on the language.) */
12474 /* If first arg is constant zero, return it. */
12475 if (integer_zerop (arg0))
12476 return fold_convert_loc (loc, type, arg0);
12477 case TRUTH_AND_EXPR:
12478 /* If either arg is constant true, drop it. */
12479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12480 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12481 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12482 /* Preserve sequence points. */
12483 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12485 /* If second arg is constant zero, result is zero, but first arg
12486 must be evaluated. */
12487 if (integer_zerop (arg1))
12488 return omit_one_operand_loc (loc, type, arg1, arg0);
12489 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12490 case will be handled here. */
12491 if (integer_zerop (arg0))
12492 return omit_one_operand_loc (loc, type, arg0, arg1);
12494 /* !X && X is always false. */
12495 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12496 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12497 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12498 /* X && !X is always false. */
12499 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12500 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12501 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12503 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12504 means A >= Y && A != MAX, but in this case we know that
12505 A < X <= MAX. */
12507 if (!TREE_SIDE_EFFECTS (arg0)
12508 && !TREE_SIDE_EFFECTS (arg1))
12510 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12511 if (tem && !operand_equal_p (tem, arg0, 0))
12512 return fold_build2_loc (loc, code, type, tem, arg1);
12514 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12515 if (tem && !operand_equal_p (tem, arg1, 0))
12516 return fold_build2_loc (loc, code, type, arg0, tem);
12519 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12520 != NULL_TREE)
12521 return tem;
12523 return NULL_TREE;
12525 case TRUTH_ORIF_EXPR:
12526 /* Note that the operands of this must be ints
12527 and their values must be 0 or true.
12528 ("true" is a fixed value perhaps depending on the language.) */
12529 /* If first arg is constant true, return it. */
12530 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12531 return fold_convert_loc (loc, type, arg0);
12532 case TRUTH_OR_EXPR:
12533 /* If either arg is constant zero, drop it. */
12534 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12535 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12536 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12537 /* Preserve sequence points. */
12538 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12539 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12540 /* If second arg is constant true, result is true, but we must
12541 evaluate first arg. */
12542 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12543 return omit_one_operand_loc (loc, type, arg1, arg0);
12544 /* Likewise for first arg, but note this only occurs here for
12545 TRUTH_OR_EXPR. */
12546 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12547 return omit_one_operand_loc (loc, type, arg0, arg1);
12549 /* !X || X is always true. */
12550 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12551 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12552 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12553 /* X || !X is always true. */
12554 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12555 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12556 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12558 /* (X && !Y) || (!X && Y) is X ^ Y */
12559 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12560 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12562 tree a0, a1, l0, l1, n0, n1;
12564 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12565 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12567 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12568 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12570 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12571 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12573 if ((operand_equal_p (n0, a0, 0)
12574 && operand_equal_p (n1, a1, 0))
12575 || (operand_equal_p (n0, a1, 0)
12576 && operand_equal_p (n1, a0, 0)))
12577 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12580 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12581 != NULL_TREE)
12582 return tem;
12584 return NULL_TREE;
12586 case TRUTH_XOR_EXPR:
12587 /* If the second arg is constant zero, drop it. */
12588 if (integer_zerop (arg1))
12589 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12590 /* If the second arg is constant true, this is a logical inversion. */
12591 if (integer_onep (arg1))
12593 /* Only call invert_truthvalue if operand is a truth value. */
12594 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12595 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12596 else
12597 tem = invert_truthvalue_loc (loc, arg0);
12598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12600 /* Identical arguments cancel to zero. */
12601 if (operand_equal_p (arg0, arg1, 0))
12602 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12604 /* !X ^ X is always true. */
12605 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12609 /* X ^ !X is always true. */
12610 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12612 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12614 return NULL_TREE;
12616 case EQ_EXPR:
12617 case NE_EXPR:
12618 STRIP_NOPS (arg0);
12619 STRIP_NOPS (arg1);
12621 tem = fold_comparison (loc, code, type, op0, op1);
12622 if (tem != NULL_TREE)
12623 return tem;
12625 /* bool_var != 0 becomes bool_var. */
12626 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12627 && code == NE_EXPR)
12628 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12630 /* bool_var == 1 becomes bool_var. */
12631 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12632 && code == EQ_EXPR)
12633 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12635 /* bool_var != 1 becomes !bool_var. */
12636 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12637 && code == NE_EXPR)
12638 return fold_convert_loc (loc, type,
12639 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12640 TREE_TYPE (arg0), arg0));
12642 /* bool_var == 0 becomes !bool_var. */
12643 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12644 && code == EQ_EXPR)
12645 return fold_convert_loc (loc, type,
12646 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12647 TREE_TYPE (arg0), arg0));
12649 /* !exp != 0 becomes !exp */
12650 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12651 && code == NE_EXPR)
12652 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12654 /* If this is an equality comparison of the address of two non-weak,
12655 unaliased symbols neither of which are extern (since we do not
12656 have access to attributes for externs), then we know the result. */
12657 if (TREE_CODE (arg0) == ADDR_EXPR
12658 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12659 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12660 && ! lookup_attribute ("alias",
12661 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12662 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12663 && TREE_CODE (arg1) == ADDR_EXPR
12664 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12665 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12666 && ! lookup_attribute ("alias",
12667 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12668 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12670 /* We know that we're looking at the address of two
12671 non-weak, unaliased, static _DECL nodes.
12673 It is both wasteful and incorrect to call operand_equal_p
12674 to compare the two ADDR_EXPR nodes. It is wasteful in that
12675 all we need to do is test pointer equality for the arguments
12676 to the two ADDR_EXPR nodes. It is incorrect to use
12677 operand_equal_p as that function is NOT equivalent to a
12678 C equality test. It can in fact return false for two
12679 objects which would test as equal using the C equality
12680 operator. */
12681 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12682 return constant_boolean_node (equal
12683 ? code == EQ_EXPR : code != EQ_EXPR,
12684 type);
12687 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12688 a MINUS_EXPR of a constant, we can convert it into a comparison with
12689 a revised constant as long as no overflow occurs. */
12690 if (TREE_CODE (arg1) == INTEGER_CST
12691 && (TREE_CODE (arg0) == PLUS_EXPR
12692 || TREE_CODE (arg0) == MINUS_EXPR)
12693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12694 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12695 ? MINUS_EXPR : PLUS_EXPR,
12696 fold_convert_loc (loc, TREE_TYPE (arg0),
12697 arg1),
12698 TREE_OPERAND (arg0, 1)))
12699 && !TREE_OVERFLOW (tem))
12700 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12702 /* Similarly for a NEGATE_EXPR. */
12703 if (TREE_CODE (arg0) == NEGATE_EXPR
12704 && TREE_CODE (arg1) == INTEGER_CST
12705 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12706 arg1)))
12707 && TREE_CODE (tem) == INTEGER_CST
12708 && !TREE_OVERFLOW (tem))
12709 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12711 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12712 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12713 && TREE_CODE (arg1) == INTEGER_CST
12714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12715 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12716 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12717 fold_convert_loc (loc,
12718 TREE_TYPE (arg0),
12719 arg1),
12720 TREE_OPERAND (arg0, 1)));
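/* Worked example (illustrative): (x ^ 5) == 3 holds exactly when
   x == (5 ^ 3) == 6, because XOR with a constant is its own inverse.  */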
12722 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12723 if ((TREE_CODE (arg0) == PLUS_EXPR
12724 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12725 || TREE_CODE (arg0) == MINUS_EXPR)
12726 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12727 0)),
12728 arg1, 0)
12729 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12730 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12732 tree val = TREE_OPERAND (arg0, 1);
12733 return omit_two_operands_loc (loc, type,
12734 fold_build2_loc (loc, code, type,
12735 val,
12736 build_int_cst (TREE_TYPE (val),
12737 0)),
12738 TREE_OPERAND (arg0, 0), arg1);
12741 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12742 if (TREE_CODE (arg0) == MINUS_EXPR
12743 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12744 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12745 1)),
12746 arg1, 0)
12747 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12749 return omit_two_operands_loc (loc, type,
12750 code == NE_EXPR
12751 ? boolean_true_node : boolean_false_node,
12752 TREE_OPERAND (arg0, 1), arg1);
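/* Worked example (illustrative): 7 - x == x would require 2 * x == 7,
   which is impossible since 2 * x stays even under wrap-around; hence
   for odd C the result of C - X ==/!= X is a compile-time constant.  */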
12755 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12756 for !=. Don't do this for ordered comparisons due to overflow. */
12757 if (TREE_CODE (arg0) == MINUS_EXPR
12758 && integer_zerop (arg1))
12759 return fold_build2_loc (loc, code, type,
12760 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12762 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12763 if (TREE_CODE (arg0) == ABS_EXPR
12764 && (integer_zerop (arg1) || real_zerop (arg1)))
12765 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12767 /* If this is an EQ or NE comparison with zero and ARG0 is
12768 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12769 two operations, but the latter can be done in one less insn
12770 on machines that have only two-operand insns or on which a
12771 constant cannot be the first operand. */
12772 if (TREE_CODE (arg0) == BIT_AND_EXPR
12773 && integer_zerop (arg1))
12775 tree arg00 = TREE_OPERAND (arg0, 0);
12776 tree arg01 = TREE_OPERAND (arg0, 1);
12777 if (TREE_CODE (arg00) == LSHIFT_EXPR
12778 && integer_onep (TREE_OPERAND (arg00, 0)))
12780 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12781 arg01, TREE_OPERAND (arg00, 1));
12782 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12783 build_int_cst (TREE_TYPE (arg0), 1));
12784 return fold_build2_loc (loc, code, type,
12785 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12786 arg1);
12788 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12789 && integer_onep (TREE_OPERAND (arg01, 0)))
12791 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12792 arg00, TREE_OPERAND (arg01, 1));
12793 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12794 build_int_cst (TREE_TYPE (arg0), 1));
12795 return fold_build2_loc (loc, code, type,
12796 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12797 arg1);
12801 /* If this is an NE or EQ comparison of zero against the result of a
12802 signed MOD operation whose second operand is a power of 2, make
12803 the MOD operation unsigned since it is simpler and equivalent. */
12804 if (integer_zerop (arg1)
12805 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12806 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12807 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12808 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12809 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12810 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12812 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12813 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12814 fold_convert_loc (loc, newtype,
12815 TREE_OPERAND (arg0, 0)),
12816 fold_convert_loc (loc, newtype,
12817 TREE_OPERAND (arg0, 1)));
12819 return fold_build2_loc (loc, code, type, newmod,
12820 fold_convert_loc (loc, newtype, arg1));
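/* Worked example (illustrative, two's complement): (x % 4) == 0 tests
   whether the low two bits of x are zero, and that is true for the
   signed and the unsigned remainder alike, so the cheaper unsigned
   form (later foldable to a mask test) can be used.  */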
12823 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12824 C1 is a valid shift constant, and C2 is a power of two, i.e.
12825 a single bit. */
12826 if (TREE_CODE (arg0) == BIT_AND_EXPR
12827 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12828 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12829 == INTEGER_CST
12830 && integer_pow2p (TREE_OPERAND (arg0, 1))
12831 && integer_zerop (arg1))
12833 tree itype = TREE_TYPE (arg0);
12834 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12835 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12837 /* Check for a valid shift count. */
12838 if (TREE_INT_CST_HIGH (arg001) == 0
12839 && TREE_INT_CST_LOW (arg001) < prec)
12841 tree arg01 = TREE_OPERAND (arg0, 1);
12842 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12843 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12844 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12845 can be rewritten as (X & (C2 << C1)) != 0. */
12846 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12848 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12849 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12850 return fold_build2_loc (loc, code, type, tem,
12851 fold_convert_loc (loc, itype, arg1));
12853 /* Otherwise, for signed (arithmetic) shifts,
12854 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12855 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12856 else if (!TYPE_UNSIGNED (itype))
12857 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12858 arg000, build_int_cst (itype, 0));
12859 /* Otherwise, for unsigned (logical) shifts,
12860 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12861 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12862 else
12863 return omit_one_operand_loc (loc, type,
12864 code == EQ_EXPR ? integer_one_node
12865 : integer_zero_node,
12866 arg000);
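/* Worked examples (illustrative, 32-bit int): ((x >> 3) & 4) != 0
   becomes (x & 32) != 0 because 4 << 3 == 32 still fits in the type;
   for signed x, ((x >> 30) & 4) != 0 tests a copy of the sign bit and
   becomes x < 0.  */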
12870 /* If we have (A & C) == C where C is a power of 2, convert this into
12871 (A & C) != 0. Similarly for NE_EXPR. */
12872 if (TREE_CODE (arg0) == BIT_AND_EXPR
12873 && integer_pow2p (TREE_OPERAND (arg0, 1))
12874 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12875 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12876 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12877 integer_zero_node));
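/* Worked example (illustrative): x & 8 can only evaluate to 0 or 8,
   so (x & 8) == 8 is equivalent to (x & 8) != 0, typically a cheaper
   single-bit test.  */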
12879 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12880 bit, then fold the expression into A < 0 or A >= 0. */
12881 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12882 if (tem)
12883 return tem;
12885 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12886 Similarly for NE_EXPR. */
12887 if (TREE_CODE (arg0) == BIT_AND_EXPR
12888 && TREE_CODE (arg1) == INTEGER_CST
12889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12891 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12892 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12893 TREE_OPERAND (arg0, 1));
12894 tree dandnotc
12895 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12896 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12897 notc);
12898 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12899 if (integer_nonzerop (dandnotc))
12900 return omit_one_operand_loc (loc, type, rslt, arg0);
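/* Worked example (illustrative): (x & 3) == 4 can never hold, since 4
   has a bit outside the mask 3; dandnotc is exactly those stray bits
   (4 & ~3 == 4, nonzero), so the comparison folds to false.  */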
12903 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12904 Similarly for NE_EXPR. */
12905 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12906 && TREE_CODE (arg1) == INTEGER_CST
12907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12909 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12910 tree candnotd
12911 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12912 TREE_OPERAND (arg0, 1),
12913 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12914 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12915 if (integer_nonzerop (candnotd))
12916 return omit_one_operand_loc (loc, type, rslt, arg0);
12919 /* If this is a comparison of a field, we may be able to simplify it. */
12920 if ((TREE_CODE (arg0) == COMPONENT_REF
12921 || TREE_CODE (arg0) == BIT_FIELD_REF)
12922 /* Handle the constant case even without -O
12923 to make sure the warnings are given. */
12924 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12926 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12927 if (t1)
12928 return t1;
12931 /* Optimize comparisons of strlen vs zero to a compare of the
12932 first character of the string vs zero. To wit,
12933 strlen(ptr) == 0 => *ptr == 0
12934 strlen(ptr) != 0 => *ptr != 0
12935 Other cases should reduce to one of these two (or a constant)
12936 due to the return value of strlen being unsigned. */
12937 if (TREE_CODE (arg0) == CALL_EXPR
12938 && integer_zerop (arg1))
12940 tree fndecl = get_callee_fndecl (arg0);
12942 if (fndecl
12943 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12944 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12945 && call_expr_nargs (arg0) == 1
12946 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12948 tree iref = build_fold_indirect_ref_loc (loc,
12949 CALL_EXPR_ARG (arg0, 0));
12950 return fold_build2_loc (loc, code, type, iref,
12951 build_int_cst (TREE_TYPE (iref), 0));
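/* Worked example (illustrative): strlen (p) == 0 holds exactly when
   the first character is the terminating NUL, so the call folds to
   *p == 0.  Since strlen returns an unsigned size_t, comparisons such
   as strlen (p) > 0 canonicalize to strlen (p) != 0 first.  */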
12955 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12956 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12957 if (TREE_CODE (arg0) == RSHIFT_EXPR
12958 && integer_zerop (arg1)
12959 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12961 tree arg00 = TREE_OPERAND (arg0, 0);
12962 tree arg01 = TREE_OPERAND (arg0, 1);
12963 tree itype = TREE_TYPE (arg00);
12964 if (TREE_INT_CST_HIGH (arg01) == 0
12965 && TREE_INT_CST_LOW (arg01)
12966 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12968 if (TYPE_UNSIGNED (itype))
12970 itype = signed_type_for (itype);
12971 arg00 = fold_convert_loc (loc, itype, arg00);
12973 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12974 type, arg00, build_zero_cst (itype));
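/* Worked example (illustrative, 32-bit int): (x >> 31) != 0 holds
   exactly when the sign bit is set, i.e. x < 0; an unsigned x is
   converted to the signed type first so that the sign-bit test can be
   expressed.  */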
12978 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12979 if (integer_zerop (arg1)
12980 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12981 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12982 TREE_OPERAND (arg0, 1));
12984 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12985 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12986 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12987 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12988 build_zero_cst (TREE_TYPE (arg0)));
12989 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12992 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12994 build_zero_cst (TREE_TYPE (arg0)));
12996 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12997 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12998 && TREE_CODE (arg1) == INTEGER_CST
12999 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13000 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13001 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13002 TREE_OPERAND (arg0, 1), arg1));
13004 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13005 (X & C) == 0 when C is a single bit. */
13006 if (TREE_CODE (arg0) == BIT_AND_EXPR
13007 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13008 && integer_zerop (arg1)
13009 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13011 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13012 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13013 TREE_OPERAND (arg0, 1));
13014 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13015 type, tem,
13016 fold_convert_loc (loc, TREE_TYPE (arg0),
13017 arg1));
13020 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13021 constant C is a power of two, i.e. a single bit. */
13022 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13024 && integer_zerop (arg1)
13025 && integer_pow2p (TREE_OPERAND (arg0, 1))
13026 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13027 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13029 tree arg00 = TREE_OPERAND (arg0, 0);
13030 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13031 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13034 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13035 when C is a power of two, i.e. a single bit. */
13036 if (TREE_CODE (arg0) == BIT_AND_EXPR
13037 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13038 && integer_zerop (arg1)
13039 && integer_pow2p (TREE_OPERAND (arg0, 1))
13040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13041 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13043 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13044 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13045 arg000, TREE_OPERAND (arg0, 1));
13046 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13047 tem, build_int_cst (TREE_TYPE (tem), 0));
13050 if (integer_zerop (arg1)
13051 && tree_expr_nonzero_p (arg0))
13053 tree res = constant_boolean_node (code == NE_EXPR, type);
13054 return omit_one_operand_loc (loc, type, res, arg0);
13057 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13058 if (TREE_CODE (arg0) == NEGATE_EXPR
13059 && TREE_CODE (arg1) == NEGATE_EXPR)
13060 return fold_build2_loc (loc, code, type,
13061 TREE_OPERAND (arg0, 0),
13062 fold_convert_loc (loc, TREE_TYPE (arg0),
13063 TREE_OPERAND (arg1, 0)));
13065 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13066 if (TREE_CODE (arg0) == BIT_AND_EXPR
13067 && TREE_CODE (arg1) == BIT_AND_EXPR)
13069 tree arg00 = TREE_OPERAND (arg0, 0);
13070 tree arg01 = TREE_OPERAND (arg0, 1);
13071 tree arg10 = TREE_OPERAND (arg1, 0);
13072 tree arg11 = TREE_OPERAND (arg1, 1);
13073 tree itype = TREE_TYPE (arg0);
13075 if (operand_equal_p (arg01, arg11, 0))
13076 return fold_build2_loc (loc, code, type,
13077 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13078 fold_build2_loc (loc,
13079 BIT_XOR_EXPR, itype,
13080 arg00, arg10),
13081 arg01),
13082 build_zero_cst (itype));
13084 if (operand_equal_p (arg01, arg10, 0))
13085 return fold_build2_loc (loc, code, type,
13086 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13087 fold_build2_loc (loc,
13088 BIT_XOR_EXPR, itype,
13089 arg00, arg11),
13090 arg01),
13091 build_zero_cst (itype));
13093 if (operand_equal_p (arg00, arg11, 0))
13094 return fold_build2_loc (loc, code, type,
13095 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13096 fold_build2_loc (loc,
13097 BIT_XOR_EXPR, itype,
13098 arg01, arg10),
13099 arg00),
13100 build_zero_cst (itype));
13102 if (operand_equal_p (arg00, arg10, 0))
13103 return fold_build2_loc (loc, code, type,
13104 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13105 fold_build2_loc (loc,
13106 BIT_XOR_EXPR, itype,
13107 arg01, arg11),
13108 arg00),
13109 build_zero_cst (itype));
13112 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13113 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13115 tree arg00 = TREE_OPERAND (arg0, 0);
13116 tree arg01 = TREE_OPERAND (arg0, 1);
13117 tree arg10 = TREE_OPERAND (arg1, 0);
13118 tree arg11 = TREE_OPERAND (arg1, 1);
13119 tree itype = TREE_TYPE (arg0);
13121 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13122 operand_equal_p guarantees no side-effects so we don't need
13123 to use omit_one_operand on Z. */
13124 if (operand_equal_p (arg01, arg11, 0))
13125 return fold_build2_loc (loc, code, type, arg00,
13126 fold_convert_loc (loc, TREE_TYPE (arg00),
13127 arg10));
13128 if (operand_equal_p (arg01, arg10, 0))
13129 return fold_build2_loc (loc, code, type, arg00,
13130 fold_convert_loc (loc, TREE_TYPE (arg00),
13131 arg11));
13132 if (operand_equal_p (arg00, arg11, 0))
13133 return fold_build2_loc (loc, code, type, arg01,
13134 fold_convert_loc (loc, TREE_TYPE (arg01),
13135 arg10));
13136 if (operand_equal_p (arg00, arg10, 0))
13137 return fold_build2_loc (loc, code, type, arg01,
13138 fold_convert_loc (loc, TREE_TYPE (arg01),
13139 arg11));
13141 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13142 if (TREE_CODE (arg01) == INTEGER_CST
13143 && TREE_CODE (arg11) == INTEGER_CST)
13145 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13146 fold_convert_loc (loc, itype, arg11));
13147 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13148 return fold_build2_loc (loc, code, type, tem,
13149 fold_convert_loc (loc, itype, arg10));
13153 /* Attempt to simplify equality/inequality comparisons of complex
13154 values. Only lower the comparison if the result is known or
13155 can be simplified to a single scalar comparison. */
13156 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13157 || TREE_CODE (arg0) == COMPLEX_CST)
13158 && (TREE_CODE (arg1) == COMPLEX_EXPR
13159 || TREE_CODE (arg1) == COMPLEX_CST))
13161 tree real0, imag0, real1, imag1;
13162 tree rcond, icond;
13164 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13166 real0 = TREE_OPERAND (arg0, 0);
13167 imag0 = TREE_OPERAND (arg0, 1);
13169 else
13171 real0 = TREE_REALPART (arg0);
13172 imag0 = TREE_IMAGPART (arg0);
13175 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13177 real1 = TREE_OPERAND (arg1, 0);
13178 imag1 = TREE_OPERAND (arg1, 1);
13180 else
13182 real1 = TREE_REALPART (arg1);
13183 imag1 = TREE_IMAGPART (arg1);
13186 rcond = fold_binary_loc (loc, code, type, real0, real1);
13187 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13189 if (integer_zerop (rcond))
13191 if (code == EQ_EXPR)
13192 return omit_two_operands_loc (loc, type, boolean_false_node,
13193 imag0, imag1);
13194 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13196 else
13198 if (code == NE_EXPR)
13199 return omit_two_operands_loc (loc, type, boolean_true_node,
13200 imag0, imag1);
13201 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13205 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13206 if (icond && TREE_CODE (icond) == INTEGER_CST)
13208 if (integer_zerop (icond))
13210 if (code == EQ_EXPR)
13211 return omit_two_operands_loc (loc, type, boolean_false_node,
13212 real0, real1);
13213 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13215 else
13217 if (code == NE_EXPR)
13218 return omit_two_operands_loc (loc, type, boolean_true_node,
13219 real0, real1);
13220 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13225 return NULL_TREE;
13227 case LT_EXPR:
13228 case GT_EXPR:
13229 case LE_EXPR:
13230 case GE_EXPR:
13231 tem = fold_comparison (loc, code, type, op0, op1);
13232 if (tem != NULL_TREE)
13233 return tem;
13235 /* Transform comparisons of the form X +- C CMP X. */
13236 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13237 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13238 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13239 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13240 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13241 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13243 tree arg01 = TREE_OPERAND (arg0, 1);
13244 enum tree_code code0 = TREE_CODE (arg0);
13245 int is_positive;
13247 if (TREE_CODE (arg01) == REAL_CST)
13248 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13249 else
13250 is_positive = tree_int_cst_sgn (arg01);
13252 /* (X - c) > X becomes false. */
13253 if (code == GT_EXPR
13254 && ((code0 == MINUS_EXPR && is_positive >= 0)
13255 || (code0 == PLUS_EXPR && is_positive <= 0)))
13257 if (TREE_CODE (arg01) == INTEGER_CST
13258 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13259 fold_overflow_warning (("assuming signed overflow does not "
13260 "occur when assuming that (X - c) > X "
13261 "is always false"),
13262 WARN_STRICT_OVERFLOW_ALL);
13263 return constant_boolean_node (0, type);
13266 /* Likewise (X + c) < X becomes false. */
13267 if (code == LT_EXPR
13268 && ((code0 == PLUS_EXPR && is_positive >= 0)
13269 || (code0 == MINUS_EXPR && is_positive <= 0)))
13271 if (TREE_CODE (arg01) == INTEGER_CST
13272 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13273 fold_overflow_warning (("assuming signed overflow does not "
13274 "occur when assuming that "
13275 "(X + c) < X is always false"),
13276 WARN_STRICT_OVERFLOW_ALL);
13277 return constant_boolean_node (0, type);
13280 /* Convert (X - c) <= X to true. */
13281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13282 && code == LE_EXPR
13283 && ((code0 == MINUS_EXPR && is_positive >= 0)
13284 || (code0 == PLUS_EXPR && is_positive <= 0)))
13286 if (TREE_CODE (arg01) == INTEGER_CST
13287 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13288 fold_overflow_warning (("assuming signed overflow does not "
13289 "occur when assuming that "
13290 "(X - c) <= X is always true"),
13291 WARN_STRICT_OVERFLOW_ALL);
13292 return constant_boolean_node (1, type);
13295 /* Convert (X + c) >= X to true. */
13296 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13297 && code == GE_EXPR
13298 && ((code0 == PLUS_EXPR && is_positive >= 0)
13299 || (code0 == MINUS_EXPR && is_positive <= 0)))
13301 if (TREE_CODE (arg01) == INTEGER_CST
13302 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13303 fold_overflow_warning (("assuming signed overflow does not "
13304 "occur when assuming that "
13305 "(X + c) >= X is always true"),
13306 WARN_STRICT_OVERFLOW_ALL);
13307 return constant_boolean_node (1, type);
13310 if (TREE_CODE (arg01) == INTEGER_CST)
13312 /* Convert X + c > X and X - c < X to true for integers. */
13313 if (code == GT_EXPR
13314 && ((code0 == PLUS_EXPR && is_positive > 0)
13315 || (code0 == MINUS_EXPR && is_positive < 0)))
13317 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13318 fold_overflow_warning (("assuming signed overflow does "
13319 "not occur when assuming that "
13320 "(X + c) > X is always true"),
13321 WARN_STRICT_OVERFLOW_ALL);
13322 return constant_boolean_node (1, type);
13325 if (code == LT_EXPR
13326 && ((code0 == MINUS_EXPR && is_positive > 0)
13327 || (code0 == PLUS_EXPR && is_positive < 0)))
13329 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13330 fold_overflow_warning (("assuming signed overflow does "
13331 "not occur when assuming that "
13332 "(X - c) < X is always true"),
13333 WARN_STRICT_OVERFLOW_ALL);
13334 return constant_boolean_node (1, type);
13337 /* Convert X + c <= X and X - c >= X to false for integers. */
13338 if (code == LE_EXPR
13339 && ((code0 == PLUS_EXPR && is_positive > 0)
13340 || (code0 == MINUS_EXPR && is_positive < 0)))
13342 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13343 fold_overflow_warning (("assuming signed overflow does "
13344 "not occur when assuming that "
13345 "(X + c) <= X is always false"),
13346 WARN_STRICT_OVERFLOW_ALL);
13347 return constant_boolean_node (0, type);
13350 if (code == GE_EXPR
13351 && ((code0 == MINUS_EXPR && is_positive > 0)
13352 || (code0 == PLUS_EXPR && is_positive < 0)))
13354 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13355 fold_overflow_warning (("assuming signed overflow does "
13356 "not occur when assuming that "
13357 "(X - c) >= X is always false"),
13358 WARN_STRICT_OVERFLOW_ALL);
13359 return constant_boolean_node (0, type);
13364 /* Comparisons with the highest or lowest possible integer of
13365 the specified precision will have known values. */
13367 tree arg1_type = TREE_TYPE (arg1);
13368 unsigned int width = TYPE_PRECISION (arg1_type);
13370 if (TREE_CODE (arg1) == INTEGER_CST
13371 && width <= HOST_BITS_PER_DOUBLE_INT
13372 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13374 HOST_WIDE_INT signed_max_hi;
13375 unsigned HOST_WIDE_INT signed_max_lo;
13376 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13378 if (width <= HOST_BITS_PER_WIDE_INT)
13380 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13381 - 1;
13382 signed_max_hi = 0;
13383 max_hi = 0;
13385 if (TYPE_UNSIGNED (arg1_type))
13387 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13388 min_lo = 0;
13389 min_hi = 0;
13391 else
13393 max_lo = signed_max_lo;
13394 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13395 min_hi = -1;
13398 else
13400 width -= HOST_BITS_PER_WIDE_INT;
13401 signed_max_lo = -1;
13402 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13403 - 1;
13404 max_lo = -1;
13405 min_lo = 0;
13407 if (TYPE_UNSIGNED (arg1_type))
13409 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13410 min_hi = 0;
13412 else
13414 max_hi = signed_max_hi;
13415 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13419 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13420 && TREE_INT_CST_LOW (arg1) == max_lo)
13421 switch (code)
13423 case GT_EXPR:
13424 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13426 case GE_EXPR:
13427 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13429 case LE_EXPR:
13430 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13432 case LT_EXPR:
13433 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13435 /* The GE_EXPR and LT_EXPR cases above are not normally
13436 reached because of previous transformations. */
13438 default:
13439 break;
13441 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13442 == max_hi
13443 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13444 switch (code)
13446 case GT_EXPR:
13447 arg1 = const_binop (PLUS_EXPR, arg1,
13448 build_int_cst (TREE_TYPE (arg1), 1));
13449 return fold_build2_loc (loc, EQ_EXPR, type,
13450 fold_convert_loc (loc,
13451 TREE_TYPE (arg1), arg0),
13452 arg1);
13453 case LE_EXPR:
13454 arg1 = const_binop (PLUS_EXPR, arg1,
13455 build_int_cst (TREE_TYPE (arg1), 1));
13456 return fold_build2_loc (loc, NE_EXPR, type,
13457 fold_convert_loc (loc, TREE_TYPE (arg1),
13458 arg0),
13459 arg1);
13460 default:
13461 break;
13463 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13464 == min_hi
13465 && TREE_INT_CST_LOW (arg1) == min_lo)
13466 switch (code)
13468 case LT_EXPR:
13469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13471 case LE_EXPR:
13472 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13474 case GE_EXPR:
13475 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13477 case GT_EXPR:
13478 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13480 default:
13481 break;
13483 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13484 == min_hi
13485 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13486 switch (code)
13488 case GE_EXPR:
13489 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13490 return fold_build2_loc (loc, NE_EXPR, type,
13491 fold_convert_loc (loc,
13492 TREE_TYPE (arg1), arg0),
13493 arg1);
13494 case LT_EXPR:
13495 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13496 return fold_build2_loc (loc, EQ_EXPR, type,
13497 fold_convert_loc (loc, TREE_TYPE (arg1),
13498 arg0),
13499 arg1);
13500 default:
13501 break;
13504 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13505 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13506 && TYPE_UNSIGNED (arg1_type)
13507 /* We will flip the signedness of the comparison operator
13508 associated with the mode of arg1, so the sign bit is
13509 specified by this mode. Check that arg1 is the signed
13510 max associated with this sign bit. */
13511 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13512 /* signed_type does not work on pointer types. */
13513 && INTEGRAL_TYPE_P (arg1_type))
13515 /* The following case also applies to X < signed_max+1
13516 and X >= signed_max+1 because of previous transformations. */
13517 if (code == LE_EXPR || code == GT_EXPR)
13519 tree st;
13520 st = signed_type_for (TREE_TYPE (arg1));
13521 return fold_build2_loc (loc,
13522 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13523 type, fold_convert_loc (loc, st, arg0),
13524 build_int_cst (st, 0));
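/* Worked example (illustrative, 32 bits): for unsigned x,
   x > 0x7fffffff holds exactly when the sign bit is set, so it folds
   to (int) x < 0, matching the LE/GT flip above.  */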
13530 /* If we are comparing an ABS_EXPR with a constant, we can
13531 convert all the cases into explicit comparisons, but they may
13532 well not be faster than doing the ABS and one comparison.
13533 But ABS (X) <= C is a range comparison, which becomes a subtraction
13534 and a comparison, and is probably faster. */
13535 if (code == LE_EXPR
13536 && TREE_CODE (arg1) == INTEGER_CST
13537 && TREE_CODE (arg0) == ABS_EXPR
13538 && ! TREE_SIDE_EFFECTS (arg0)
13539 && (0 != (tem = negate_expr (arg1)))
13540 && TREE_CODE (tem) == INTEGER_CST
13541 && !TREE_OVERFLOW (tem))
13542 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13543 build2 (GE_EXPR, type,
13544 TREE_OPERAND (arg0, 0), tem),
13545 build2 (LE_EXPR, type,
13546 TREE_OPERAND (arg0, 0), arg1));
13548 /* Convert ABS_EXPR<x> >= 0 to true. */
13549 strict_overflow_p = false;
13550 if (code == GE_EXPR
13551 && (integer_zerop (arg1)
13552 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13553 && real_zerop (arg1)))
13554 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13556 if (strict_overflow_p)
13557 fold_overflow_warning (("assuming signed overflow does not occur "
13558 "when simplifying comparison of "
13559 "absolute value and zero"),
13560 WARN_STRICT_OVERFLOW_CONDITIONAL);
13561 return omit_one_operand_loc (loc, type,
13562 constant_boolean_node (true, type),
13563 arg0);
13566 /* Convert ABS_EXPR<x> < 0 to false. */
13567 strict_overflow_p = false;
13568 if (code == LT_EXPR
13569 && (integer_zerop (arg1) || real_zerop (arg1))
13570 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13572 if (strict_overflow_p)
13573 fold_overflow_warning (("assuming signed overflow does not occur "
13574 "when simplifying comparison of "
13575 "absolute value and zero"),
13576 WARN_STRICT_OVERFLOW_CONDITIONAL);
13577 return omit_one_operand_loc (loc, type,
13578 constant_boolean_node (false, type),
13579 arg0);
13582 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13583 and similarly for >= into !=. */
13584 if ((code == LT_EXPR || code == GE_EXPR)
13585 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13586 && TREE_CODE (arg1) == LSHIFT_EXPR
13587 && integer_onep (TREE_OPERAND (arg1, 0)))
13588 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13589 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13590 TREE_OPERAND (arg1, 1)),
13591 build_zero_cst (TREE_TYPE (arg0)));
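/* Worked example (illustrative): for unsigned x and a valid shift
   count y, x < (1 << y) holds exactly when x has no bits at position
   y or above, i.e. (x >> y) == 0, which avoids materializing the
   power-of-two value.  */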
13593 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13594 otherwise Y might be >= # of bits in X's type and thus e.g.
13595 (unsigned char) (1 << Y) for Y == 15 might be 0.
13596 If the cast is widening, then 1 << Y should have unsigned type,
13597 otherwise if Y is the number of bits in the signed shift type minus 1,
13598 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13599 might be 0xffffffff80000000. */
13600 if ((code == LT_EXPR || code == GE_EXPR)
13601 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13602 && CONVERT_EXPR_P (arg1)
13603 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13604 && (TYPE_PRECISION (TREE_TYPE (arg1))
13605 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13606 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13607 || (TYPE_PRECISION (TREE_TYPE (arg1))
13608 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13609 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13611 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13612 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13613 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13614 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13615 build_zero_cst (TREE_TYPE (arg0)));
13618 return NULL_TREE;
13620 case UNORDERED_EXPR:
13621 case ORDERED_EXPR:
13622 case UNLT_EXPR:
13623 case UNLE_EXPR:
13624 case UNGT_EXPR:
13625 case UNGE_EXPR:
13626 case UNEQ_EXPR:
13627 case LTGT_EXPR:
13628 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13630 t1 = fold_relational_const (code, type, arg0, arg1);
13631 if (t1 != NULL_TREE)
13632 return t1;
13635 /* If the first operand is NaN, the result is constant. */
13636 if (TREE_CODE (arg0) == REAL_CST
13637 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13638 && (code != LTGT_EXPR || ! flag_trapping_math))
13640 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13641 ? integer_zero_node
13642 : integer_one_node;
13643 return omit_one_operand_loc (loc, type, t1, arg1);
13646 /* If the second operand is NaN, the result is constant. */
13647 if (TREE_CODE (arg1) == REAL_CST
13648 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13649 && (code != LTGT_EXPR || ! flag_trapping_math))
13651 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13652 ? integer_zero_node
13653 : integer_one_node;
13654 return omit_one_operand_loc (loc, type, t1, arg0);
13657 /* Simplify unordered comparison of something with itself. */
13658 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13659 && operand_equal_p (arg0, arg1, 0))
13660 return constant_boolean_node (1, type);
13662 if (code == LTGT_EXPR
13663 && !flag_trapping_math
13664 && operand_equal_p (arg0, arg1, 0))
13665 return constant_boolean_node (0, type);
13667 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13669 tree targ0 = strip_float_extensions (arg0);
13670 tree targ1 = strip_float_extensions (arg1);
13671 tree newtype = TREE_TYPE (targ0);
13673 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13674 newtype = TREE_TYPE (targ1);
13676 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13677 return fold_build2_loc (loc, code, type,
13678 fold_convert_loc (loc, newtype, targ0),
13679 fold_convert_loc (loc, newtype, targ1));
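/* Worked example (illustrative): (double) f1 < (double) f2 for floats
   f1 and f2 equals f1 < f2, because widening an IEEE float to double
   is exact and order-preserving, so the extensions can be stripped.  */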
13682 return NULL_TREE;
13684 case COMPOUND_EXPR:
13685 /* When pedantic, a compound expression can be neither an lvalue
13686 nor an integer constant expression. */
13687 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13688 return NULL_TREE;
13689 /* Don't let (0, 0) be a null pointer constant. */
13690 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13691 : fold_convert_loc (loc, type, arg1);
13692 return pedantic_non_lvalue_loc (loc, tem);
13694 case COMPLEX_EXPR:
13695 if ((TREE_CODE (arg0) == REAL_CST
13696 && TREE_CODE (arg1) == REAL_CST)
13697 || (TREE_CODE (arg0) == INTEGER_CST
13698 && TREE_CODE (arg1) == INTEGER_CST))
13699 return build_complex (type, arg0, arg1);
13700 if (TREE_CODE (arg0) == REALPART_EXPR
13701 && TREE_CODE (arg1) == IMAGPART_EXPR
13702 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13703 && operand_equal_p (TREE_OPERAND (arg0, 0),
13704 TREE_OPERAND (arg1, 0), 0))
13705 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13706 TREE_OPERAND (arg1, 0));
13707 return NULL_TREE;
13709 case ASSERT_EXPR:
13710 /* An ASSERT_EXPR should never be passed to fold_binary. */
13711 gcc_unreachable ();
13713 case VEC_PACK_TRUNC_EXPR:
13714 case VEC_PACK_FIX_TRUNC_EXPR:
13716 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13717 tree *elts;
13719 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13720 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13721 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13722 return NULL_TREE;
13724 elts = XALLOCAVEC (tree, nelts);
13725 if (!vec_cst_ctor_to_array (arg0, elts)
13726 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13727 return NULL_TREE;
13729 for (i = 0; i < nelts; i++)
13731 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13732 ? NOP_EXPR : FIX_TRUNC_EXPR,
13733 TREE_TYPE (type), elts[i]);
13734 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13735 return NULL_TREE;
13738 return build_vector (type, elts);
13741 case VEC_WIDEN_MULT_LO_EXPR:
13742 case VEC_WIDEN_MULT_HI_EXPR:
13743 case VEC_WIDEN_MULT_EVEN_EXPR:
13744 case VEC_WIDEN_MULT_ODD_EXPR:
13746 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13747 unsigned int out, ofs, scale;
13748 tree *elts;
13750 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13751 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13752 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13753 return NULL_TREE;
13755 elts = XALLOCAVEC (tree, nelts * 4);
13756 if (!vec_cst_ctor_to_array (arg0, elts)
13757 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13758 return NULL_TREE;
13760 if (code == VEC_WIDEN_MULT_LO_EXPR)
13761 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13762 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13763 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13764 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13765 scale = 1, ofs = 0;
13766 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13767 scale = 1, ofs = 1;
13769 for (out = 0; out < nelts; out++)
13771 unsigned int in1 = (out << scale) + ofs;
13772 unsigned int in2 = in1 + nelts * 2;
13773 tree t1, t2;
13775 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13776 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13778 if (t1 == NULL_TREE || t2 == NULL_TREE)
13779 return NULL_TREE;
13780 elts[out] = const_binop (MULT_EXPR, t1, t2);
13781 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13782 return NULL_TREE;
13785 return build_vector (type, elts);
13788 default:
13789 return NULL_TREE;
13790 } /* switch (code) */
13793 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13794 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13795 of GOTO_EXPR. */
13797 static tree
13798 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13800 switch (TREE_CODE (*tp))
13802 case LABEL_EXPR:
13803 return *tp;
13805 case GOTO_EXPR:
13806 *walk_subtrees = 0;
13808 /* ... fall through ... */
13810 default:
13811 return NULL_TREE;
13815 /* Return whether the sub-tree ST contains a label which is accessible from
13816 outside the sub-tree. */
13818 static bool
13819 contains_label_p (tree st)
13821 return
13822 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13825 /* Fold a ternary expression of code CODE and type TYPE with operands
13826 OP0, OP1, and OP2. Return the folded expression if folding is
13827 successful. Otherwise, return NULL_TREE. */
13829 tree
13830 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13831 tree op0, tree op1, tree op2)
13833 tree tem;
13834 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13835 enum tree_code_class kind = TREE_CODE_CLASS (code);
13837 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13838 && TREE_CODE_LENGTH (code) == 3);
13840 /* Strip any conversions that don't change the mode. This is safe
13841 for every expression, except for a comparison expression because
13842 its signedness is derived from its operands. So, in the latter
13843 case, only strip conversions that don't change the signedness.
13845 Note that this is done as an internal manipulation within the
13846 constant folder, in order to find the simplest representation of
13847 the arguments so that their form can be studied. In any case,
13848 the appropriate type conversions should be put back in the tree
13849 that will get out of the constant folder. */
13850 if (op0)
13852 arg0 = op0;
13853 STRIP_NOPS (arg0);
13856 if (op1)
13858 arg1 = op1;
13859 STRIP_NOPS (arg1);
13862 if (op2)
13864 arg2 = op2;
13865 STRIP_NOPS (arg2);
13868 switch (code)
13870 case COMPONENT_REF:
13871 if (TREE_CODE (arg0) == CONSTRUCTOR
13872 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13874 unsigned HOST_WIDE_INT idx;
13875 tree field, value;
13876 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13877 if (field == arg1)
13878 return value;
13880 return NULL_TREE;
13882 case COND_EXPR:
13883 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13884 so all simple results must be passed through pedantic_non_lvalue. */
13885 if (TREE_CODE (arg0) == INTEGER_CST)
13887 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13888 tem = integer_zerop (arg0) ? op2 : op1;
13889 /* Only optimize constant conditions when the selected branch
13890 has the same type as the COND_EXPR. This avoids optimizing
13891 away "c ? x : throw", where the throw has a void type.
13892 Also avoid throwing away an operand that contains a label. */
13893 if ((!TREE_SIDE_EFFECTS (unused_op)
13894 || !contains_label_p (unused_op))
13895 && (! VOID_TYPE_P (TREE_TYPE (tem))
13896 || VOID_TYPE_P (type)))
13897 return pedantic_non_lvalue_loc (loc, tem);
13898 return NULL_TREE;
13900 if (operand_equal_p (arg1, op2, 0))
13901 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13903 /* If we have A op B ? A : C, we may be able to convert this to a
13904 simpler expression, depending on the operation and the values
13905 of B and C. Signed zeros prevent all of these transformations,
13906 for reasons given above each one.
13908 Also try swapping the arguments and inverting the conditional. */
13909 if (COMPARISON_CLASS_P (arg0)
13910 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13911 arg1, TREE_OPERAND (arg0, 1))
13912 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13914 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13915 if (tem)
13916 return tem;
13919 if (COMPARISON_CLASS_P (arg0)
13920 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13921 op2,
13922 TREE_OPERAND (arg0, 1))
13923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13925 location_t loc0 = expr_location_or (arg0, loc);
13926 tem = fold_truth_not_expr (loc0, arg0);
13927 if (tem && COMPARISON_CLASS_P (tem))
13929 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13930 if (tem)
13931 return tem;
13935 /* If the second operand is simpler than the third, swap them
13936 since that produces better jump optimization results. */
13937 if (truth_value_p (TREE_CODE (arg0))
13938 && tree_swap_operands_p (op1, op2, false))
13940 location_t loc0 = expr_location_or (arg0, loc);
13941 /* See if this can be inverted. If it can't, possibly because
13942 it was a floating-point inequality comparison, don't do
13943 anything. */
13944 tem = fold_truth_not_expr (loc0, arg0);
13945 if (tem)
13946 return fold_build3_loc (loc, code, type, tem, op2, op1);
13949 /* Convert A ? 1 : 0 to simply A. */
13950 if (integer_onep (op1)
13951 && integer_zerop (op2)
13952 /* If we try to convert OP0 to our type, the
13953 call to fold will try to move the conversion inside
13954 a COND, which will recurse. In that case, the COND_EXPR
13955 is probably the best choice, so leave it alone. */
13956 && type == TREE_TYPE (arg0))
13957 return pedantic_non_lvalue_loc (loc, arg0);
13959 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13960 over COND_EXPR in cases such as floating point comparisons. */
13961 if (integer_zerop (op1)
13962 && integer_onep (op2)
13963 && truth_value_p (TREE_CODE (arg0)))
13964 return pedantic_non_lvalue_loc (loc,
13965 fold_convert_loc (loc, type,
13966 invert_truthvalue_loc (loc,
13967 arg0)));
13969 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13970 if (TREE_CODE (arg0) == LT_EXPR
13971 && integer_zerop (TREE_OPERAND (arg0, 1))
13972 && integer_zerop (op2)
13973 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13975 /* sign_bit_p only checks ARG1 bits within A's precision.
13976 If <sign bit of A> has wider type than A, bits outside
13977 of A's precision in <sign bit of A> need to be checked.
13978 If they are all 0, this optimization needs to be done
13979 in unsigned A's type; if they are all 1, in signed A's type;
13980 otherwise this can't be done. */
13981 if (TYPE_PRECISION (TREE_TYPE (tem))
13982 < TYPE_PRECISION (TREE_TYPE (arg1))
13983 && TYPE_PRECISION (TREE_TYPE (tem))
13984 < TYPE_PRECISION (type))
13986 unsigned HOST_WIDE_INT mask_lo;
13987 HOST_WIDE_INT mask_hi;
13988 int inner_width, outer_width;
13989 tree tem_type;
13991 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13992 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13993 if (outer_width > TYPE_PRECISION (type))
13994 outer_width = TYPE_PRECISION (type);
13996 if (outer_width > HOST_BITS_PER_WIDE_INT)
13998 mask_hi = ((unsigned HOST_WIDE_INT) -1
13999 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14000 mask_lo = -1;
14002 else
14004 mask_hi = 0;
14005 mask_lo = ((unsigned HOST_WIDE_INT) -1
14006 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14008 if (inner_width > HOST_BITS_PER_WIDE_INT)
14010 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14011 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14012 mask_lo = 0;
14014 else
14015 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14016 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14018 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14019 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14021 tem_type = signed_type_for (TREE_TYPE (tem));
14022 tem = fold_convert_loc (loc, tem_type, tem);
14024 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14025 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14027 tem_type = unsigned_type_for (TREE_TYPE (tem));
14028 tem = fold_convert_loc (loc, tem_type, tem);
14030 else
14031 tem = NULL;
14034 if (tem)
14035 return
14036 fold_convert_loc (loc, type,
14037 fold_build2_loc (loc, BIT_AND_EXPR,
14038 TREE_TYPE (tem), tem,
14039 fold_convert_loc (loc,
14040 TREE_TYPE (tem),
14041 arg1)));
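/* A concrete instance of the above (assuming 32-bit int and an 8-bit
   signed char A): in (int) A < 0 ? 128 : 0, sign_bit_p recognizes 128
   as the sign bit of A's 8-bit precision; the bits of 128 above bit 7
   are all zero, so the transform is performed in unsigned char and
   produces (int) ((unsigned char) A & 128).  */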
14044 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14045 already handled above. */
14046 if (TREE_CODE (arg0) == BIT_AND_EXPR
14047 && integer_onep (TREE_OPERAND (arg0, 1))
14048 && integer_zerop (op2)
14049 && integer_pow2p (arg1))
14051 tree tem = TREE_OPERAND (arg0, 0);
14052 STRIP_NOPS (tem);
14053 if (TREE_CODE (tem) == RSHIFT_EXPR
14054 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14055 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14056 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14057 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14058 TREE_OPERAND (tem, 0), arg1);
14061 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14062 is probably obsolete because the first operand should be a
14063 truth value (that's why we have the two cases above), but let's
14064 leave it in until we can confirm this for all front-ends. */
14065 if (integer_zerop (op2)
14066 && TREE_CODE (arg0) == NE_EXPR
14067 && integer_zerop (TREE_OPERAND (arg0, 1))
14068 && integer_pow2p (arg1)
14069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14070 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14071 arg1, OEP_ONLY_CONST))
14072 return pedantic_non_lvalue_loc (loc,
14073 fold_convert_loc (loc, type,
14074 TREE_OPERAND (arg0, 0)));
14076 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14077 if (integer_zerop (op2)
14078 && truth_value_p (TREE_CODE (arg0))
14079 && truth_value_p (TREE_CODE (arg1)))
14080 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14081 fold_convert_loc (loc, type, arg0),
14082 arg1);
14084 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14085 if (integer_onep (op2)
14086 && truth_value_p (TREE_CODE (arg0))
14087 && truth_value_p (TREE_CODE (arg1)))
14089 location_t loc0 = expr_location_or (arg0, loc);
14090 /* Only perform transformation if ARG0 is easily inverted. */
14091 tem = fold_truth_not_expr (loc0, arg0);
14092 if (tem)
14093 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14094 fold_convert_loc (loc, type, tem),
14095 arg1);
14098 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14099 if (integer_zerop (arg1)
14100 && truth_value_p (TREE_CODE (arg0))
14101 && truth_value_p (TREE_CODE (op2)))
14103 location_t loc0 = expr_location_or (arg0, loc);
14104 /* Only perform transformation if ARG0 is easily inverted. */
14105 tem = fold_truth_not_expr (loc0, arg0);
14106 if (tem)
14107 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14108 fold_convert_loc (loc, type, tem),
14109 op2);
14112 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14113 if (integer_onep (arg1)
14114 && truth_value_p (TREE_CODE (arg0))
14115 && truth_value_p (TREE_CODE (op2)))
14116 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14117 fold_convert_loc (loc, type, arg0),
14118 op2);
14120 return NULL_TREE;
14122 case VEC_COND_EXPR:
14123 if (TREE_CODE (arg0) == VECTOR_CST)
14125 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14126 return pedantic_non_lvalue_loc (loc, op1);
14127 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14128 return pedantic_non_lvalue_loc (loc, op2);
14130 return NULL_TREE;
14132 case CALL_EXPR:
14133 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14134 of fold_ternary on them. */
14135 gcc_unreachable ();
14137 case BIT_FIELD_REF:
14138 if ((TREE_CODE (arg0) == VECTOR_CST
14139 || (TREE_CODE (arg0) == CONSTRUCTOR
14140 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14141 && (type == TREE_TYPE (TREE_TYPE (arg0))
14142 || (TREE_CODE (type) == VECTOR_TYPE
14143 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14145 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14146 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14147 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14148 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14150 if (n != 0
14151 && (idx % width) == 0
14152 && (n % width) == 0
14153 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14155 idx = idx / width;
14156 n = n / width;
14158 if (TREE_CODE (arg0) == VECTOR_CST)
14160 if (n == 1)
14161 return VECTOR_CST_ELT (arg0, idx);
14163 tree *vals = XALLOCAVEC (tree, n);
14164 for (unsigned i = 0; i < n; ++i)
14165 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14166 return build_vector (type, vals);
14169 /* Constructor elements can be subvectors. */
14170 unsigned HOST_WIDE_INT k = 1;
14171 if (CONSTRUCTOR_NELTS (arg0) != 0)
14173 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14174 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14175 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14178 /* We keep an exact subset of the constructor elements. */
14179 if ((idx % k) == 0 && (n % k) == 0)
14181 if (CONSTRUCTOR_NELTS (arg0) == 0)
14182 return build_constructor (type, NULL);
14183 idx /= k;
14184 n /= k;
14185 if (n == 1)
14187 if (idx < CONSTRUCTOR_NELTS (arg0))
14188 return CONSTRUCTOR_ELT (arg0, idx)->value;
14189 return build_zero_cst (type);
14192 vec<constructor_elt, va_gc> *vals;
14193 vec_alloc (vals, n);
14194 for (unsigned i = 0;
14195 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14196 ++i)
14197 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14198 CONSTRUCTOR_ELT
14199 (arg0, idx + i)->value);
14200 return build_constructor (type, vals);
14202 /* The bitfield references a single constructor element. */
14203 else if (idx + n <= (idx / k + 1) * k)
14205 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14206 return build_zero_cst (type);
14207 else if (n == k)
14208 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14209 else
14210 return fold_build3_loc (loc, code, type,
14211 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14212 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14217 /* A bit-field-ref that referenced the full argument can be stripped. */
14218 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14219 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14220 && integer_zerop (op2))
14221 return fold_convert_loc (loc, type, arg0);
14223 /* On constants we can use native encode/interpret to constant
14224 fold (nearly) all BIT_FIELD_REFs. */
14225 if (CONSTANT_CLASS_P (arg0)
14226 && can_native_interpret_type_p (type)
14227 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14228 /* This limitation should not be necessary; we just need to
14229 round this up to mode size. */
14230 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14231 /* Need bit-shifting of the buffer to relax the following. */
14232 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14234 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14235 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14236 unsigned HOST_WIDE_INT clen;
14237 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14238 /* ??? We cannot tell native_encode_expr to start at
14239 some random byte only. So limit ourselves to a reasonable amount
14240 of work. */
14241 if (clen <= 4096)
14243 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14244 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14245 if (len > 0
14246 && len * BITS_PER_UNIT >= bitpos + bitsize)
14248 tree v = native_interpret_expr (type,
14249 b + bitpos / BITS_PER_UNIT,
14250 bitsize / BITS_PER_UNIT);
14251 if (v)
14252 return v;
14257 return NULL_TREE;
14259 case FMA_EXPR:
14260 /* For integers we can decompose the FMA if possible. */
14261 if (TREE_CODE (arg0) == INTEGER_CST
14262 && TREE_CODE (arg1) == INTEGER_CST)
14263 return fold_build2_loc (loc, PLUS_EXPR, type,
14264 const_binop (MULT_EXPR, arg0, arg1), arg2);
14265 if (integer_zerop (arg2))
14266 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14268 return fold_fma (loc, type, arg0, arg1, arg2);
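/* E.g. FMA_EXPR <3, 4, c> with INTEGER_CST multiplicands becomes
   12 + c via the PLUS_EXPR above, and FMA_EXPR <a, b, 0> degenerates
   to a * b; anything else is left to fold_fma.  */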
14270 case VEC_PERM_EXPR:
14271 if (TREE_CODE (arg2) == VECTOR_CST)
14273 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14274 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14275 tree t;
14276 bool need_mask_canon = false;
14277 bool all_in_vec0 = true;
14278 bool all_in_vec1 = true;
14279 bool maybe_identity = true;
14280 bool single_arg = (op0 == op1);
14281 bool changed = false;
14283 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14284 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14285 for (i = 0; i < nelts; i++)
14287 tree val = VECTOR_CST_ELT (arg2, i);
14288 if (TREE_CODE (val) != INTEGER_CST)
14289 return NULL_TREE;
14291 sel[i] = TREE_INT_CST_LOW (val) & mask;
14292 if (TREE_INT_CST_HIGH (val)
14293 || ((unsigned HOST_WIDE_INT)
14294 TREE_INT_CST_LOW (val) != sel[i]))
14295 need_mask_canon = true;
14297 if (sel[i] < nelts)
14298 all_in_vec1 = false;
14299 else
14300 all_in_vec0 = false;
14302 if ((sel[i] & (nelts-1)) != i)
14303 maybe_identity = false;
14306 if (maybe_identity)
14308 if (all_in_vec0)
14309 return op0;
14310 if (all_in_vec1)
14311 return op1;
14314 if (all_in_vec0)
14315 op1 = op0;
14316 else if (all_in_vec1)
14318 op0 = op1;
14319 for (i = 0; i < nelts; i++)
14320 sel[i] -= nelts;
14321 need_mask_canon = true;
14324 if ((TREE_CODE (op0) == VECTOR_CST
14325 || TREE_CODE (op0) == CONSTRUCTOR)
14326 && (TREE_CODE (op1) == VECTOR_CST
14327 || TREE_CODE (op1) == CONSTRUCTOR))
14329 t = fold_vec_perm (type, op0, op1, sel);
14330 if (t != NULL_TREE)
14331 return t;
14334 if (op0 == op1 && !single_arg)
14335 changed = true;
14337 if (need_mask_canon && arg2 == op2)
14339 tree *tsel = XALLOCAVEC (tree, nelts);
14340 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14341 for (i = 0; i < nelts; i++)
14342 tsel[i] = build_int_cst (eltype, sel[i]);
14343 op2 = build_vector (TREE_TYPE (arg2), tsel);
14344 changed = true;
14347 if (changed)
14348 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14350 return NULL_TREE;
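/* Example of the canonicalizations above (hypothetical V4SI operands):
   a mask of { 4, 5, 6, 7 } selects all of op1 in order, so the whole
   VEC_PERM_EXPR folds to op1; a mask of { 5, 4, 7, 6 } still selects
   only from op1, so op0 is replaced by op1 and the mask is rewritten
   as { 1, 0, 3, 2 }.  */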
14352 default:
14353 return NULL_TREE;
14354 } /* switch (code) */
14357 /* Perform constant folding and related simplification of EXPR.
14358 The related simplifications include x*1 => x, x*0 => 0, etc.,
14359 and application of the associative law.
14360 NOP_EXPR conversions may be removed freely (as long as we
14361 are careful not to change the type of the overall expression).
14362 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14363 but we can constant-fold them if they have constant operands. */
14365 #ifdef ENABLE_FOLD_CHECKING
14366 # define fold(x) fold_1 (x)
14367 static tree fold_1 (tree);
14368 static
14369 #endif
14370 tree
14371 fold (tree expr)
14373 const tree t = expr;
14374 enum tree_code code = TREE_CODE (t);
14375 enum tree_code_class kind = TREE_CODE_CLASS (code);
14376 tree tem;
14377 location_t loc = EXPR_LOCATION (expr);
14379 /* Return right away if a constant. */
14380 if (kind == tcc_constant)
14381 return t;
14383 /* CALL_EXPR-like objects with variable numbers of operands are
14384 treated specially. */
14385 if (kind == tcc_vl_exp)
14387 if (code == CALL_EXPR)
14389 tem = fold_call_expr (loc, expr, false);
14390 return tem ? tem : expr;
14392 return expr;
14395 if (IS_EXPR_CODE_CLASS (kind))
14397 tree type = TREE_TYPE (t);
14398 tree op0, op1, op2;
14400 switch (TREE_CODE_LENGTH (code))
14402 case 1:
14403 op0 = TREE_OPERAND (t, 0);
14404 tem = fold_unary_loc (loc, code, type, op0);
14405 return tem ? tem : expr;
14406 case 2:
14407 op0 = TREE_OPERAND (t, 0);
14408 op1 = TREE_OPERAND (t, 1);
14409 tem = fold_binary_loc (loc, code, type, op0, op1);
14410 return tem ? tem : expr;
14411 case 3:
14412 op0 = TREE_OPERAND (t, 0);
14413 op1 = TREE_OPERAND (t, 1);
14414 op2 = TREE_OPERAND (t, 2);
14415 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14416 return tem ? tem : expr;
14417 default:
14418 break;
14422 switch (code)
14424 case ARRAY_REF:
14426 tree op0 = TREE_OPERAND (t, 0);
14427 tree op1 = TREE_OPERAND (t, 1);
14429 if (TREE_CODE (op1) == INTEGER_CST
14430 && TREE_CODE (op0) == CONSTRUCTOR
14431 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14433 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14434 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14435 unsigned HOST_WIDE_INT begin = 0;
14437 /* Find a matching index by means of a binary search. */
14438 while (begin != end)
14440 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14441 tree index = (*elts)[middle].index;
14443 if (TREE_CODE (index) == INTEGER_CST
14444 && tree_int_cst_lt (index, op1))
14445 begin = middle + 1;
14446 else if (TREE_CODE (index) == INTEGER_CST
14447 && tree_int_cst_lt (op1, index))
14448 end = middle;
14449 else if (TREE_CODE (index) == RANGE_EXPR
14450 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14451 begin = middle + 1;
14452 else if (TREE_CODE (index) == RANGE_EXPR
14453 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14454 end = middle;
14455 else
14456 return (*elts)[middle].value;
14460 return t;
14463 /* Return a VECTOR_CST if possible. */
14464 case CONSTRUCTOR:
14466 tree type = TREE_TYPE (t);
14467 if (TREE_CODE (type) != VECTOR_TYPE)
14468 return t;
14470 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14471 unsigned HOST_WIDE_INT idx, pos = 0;
14472 tree value;
14474 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14476 if (!CONSTANT_CLASS_P (value))
14477 return t;
14478 if (TREE_CODE (value) == VECTOR_CST)
14480 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14481 vec[pos++] = VECTOR_CST_ELT (value, i);
14483 else
14484 vec[pos++] = value;
14486 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14487 vec[pos] = build_zero_cst (TREE_TYPE (type));
14489 return build_vector (type, vec);
14492 case CONST_DECL:
14493 return fold (DECL_INITIAL (t));
14495 default:
14496 return t;
14497 } /* switch (code) */
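/* A minimal usage sketch (hypothetical local variables, not part of
   this file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));

   SUM is then the INTEGER_CST 5; when nothing simplifies, fold returns
   its argument unchanged.  */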
14500 #ifdef ENABLE_FOLD_CHECKING
14501 #undef fold
14503 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14504 hash_table <pointer_hash <tree_node> >);
14505 static void fold_check_failed (const_tree, const_tree);
14506 void print_fold_checksum (const_tree);
14508 /* When --enable-checking=fold, compute a digest of expr before
14509 and after the actual fold call to verify that fold did not
14510 accidentally change the original expr. */
14512 tree
14513 fold (tree expr)
14515 tree ret;
14516 struct md5_ctx ctx;
14517 unsigned char checksum_before[16], checksum_after[16];
14518 hash_table <pointer_hash <tree_node> > ht;
14520 ht.create (32);
14521 md5_init_ctx (&ctx);
14522 fold_checksum_tree (expr, &ctx, ht);
14523 md5_finish_ctx (&ctx, checksum_before);
14524 ht.empty ();
14526 ret = fold_1 (expr);
14528 md5_init_ctx (&ctx);
14529 fold_checksum_tree (expr, &ctx, ht);
14530 md5_finish_ctx (&ctx, checksum_after);
14531 ht.dispose ();
14533 if (memcmp (checksum_before, checksum_after, 16))
14534 fold_check_failed (expr, ret);
14536 return ret;
14539 void
14540 print_fold_checksum (const_tree expr)
14542 struct md5_ctx ctx;
14543 unsigned char checksum[16], cnt;
14544 hash_table <pointer_hash <tree_node> > ht;
14546 ht.create (32);
14547 md5_init_ctx (&ctx);
14548 fold_checksum_tree (expr, &ctx, ht);
14549 md5_finish_ctx (&ctx, checksum);
14550 ht.dispose ();
14551 for (cnt = 0; cnt < 16; ++cnt)
14552 fprintf (stderr, "%02x", checksum[cnt]);
14553 putc ('\n', stderr);
14556 static void
14557 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14559 internal_error ("fold check: original tree changed by fold");
14562 static void
14563 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14564 hash_table <pointer_hash <tree_node> > ht)
14566 tree_node **slot;
14567 enum tree_code code;
14568 union tree_node buf;
14569 int i, len;
14571 recursive_label:
14572 if (expr == NULL)
14573 return;
14574 slot = ht.find_slot (expr, INSERT);
14575 if (*slot != NULL)
14576 return;
14577 *slot = CONST_CAST_TREE (expr);
14578 code = TREE_CODE (expr);
14579 if (TREE_CODE_CLASS (code) == tcc_declaration
14580 && DECL_ASSEMBLER_NAME_SET_P (expr))
14582 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14583 memcpy ((char *) &buf, expr, tree_size (expr));
14584 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14585 expr = (tree) &buf;
14587 else if (TREE_CODE_CLASS (code) == tcc_type
14588 && (TYPE_POINTER_TO (expr)
14589 || TYPE_REFERENCE_TO (expr)
14590 || TYPE_CACHED_VALUES_P (expr)
14591 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14592 || TYPE_NEXT_VARIANT (expr)))
14594 /* Allow these fields to be modified. */
14595 tree tmp;
14596 memcpy ((char *) &buf, expr, tree_size (expr));
14597 expr = tmp = (tree) &buf;
14598 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14599 TYPE_POINTER_TO (tmp) = NULL;
14600 TYPE_REFERENCE_TO (tmp) = NULL;
14601 TYPE_NEXT_VARIANT (tmp) = NULL;
14602 if (TYPE_CACHED_VALUES_P (tmp))
14604 TYPE_CACHED_VALUES_P (tmp) = 0;
14605 TYPE_CACHED_VALUES (tmp) = NULL;
14608 md5_process_bytes (expr, tree_size (expr), ctx);
14609 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14610 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14611 if (TREE_CODE_CLASS (code) != tcc_type
14612 && TREE_CODE_CLASS (code) != tcc_declaration
14613 && code != TREE_LIST
14614 && code != SSA_NAME
14615 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14616 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14617 switch (TREE_CODE_CLASS (code))
14619 case tcc_constant:
14620 switch (code)
14622 case STRING_CST:
14623 md5_process_bytes (TREE_STRING_POINTER (expr),
14624 TREE_STRING_LENGTH (expr), ctx);
14625 break;
14626 case COMPLEX_CST:
14627 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14628 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14629 break;
14630 case VECTOR_CST:
14631 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14632 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14633 break;
14634 default:
14635 break;
14637 break;
14638 case tcc_exceptional:
14639 switch (code)
14641 case TREE_LIST:
14642 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14643 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14644 expr = TREE_CHAIN (expr);
14645 goto recursive_label;
14646 break;
14647 case TREE_VEC:
14648 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14649 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14650 break;
14651 default:
14652 break;
14654 break;
14655 case tcc_expression:
14656 case tcc_reference:
14657 case tcc_comparison:
14658 case tcc_unary:
14659 case tcc_binary:
14660 case tcc_statement:
14661 case tcc_vl_exp:
14662 len = TREE_OPERAND_LENGTH (expr);
14663 for (i = 0; i < len; ++i)
14664 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14665 break;
14666 case tcc_declaration:
14667 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14668 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14669 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14671 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14672 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14673 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14674 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14675 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14677 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14678 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14680 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14682 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14683 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14684 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14686 break;
14687 case tcc_type:
14688 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14689 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14690 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14691 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14692 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14693 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14694 if (INTEGRAL_TYPE_P (expr)
14695 || SCALAR_FLOAT_TYPE_P (expr))
14697 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14698 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14700 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14701 if (TREE_CODE (expr) == RECORD_TYPE
14702 || TREE_CODE (expr) == UNION_TYPE
14703 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14704 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14705 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14706 break;
14707 default:
14708 break;
14712 /* Helper function for outputting the checksum of a tree T. When
14713 debugging with gdb, you can "define mynext" to be "next" followed
14714 by "call debug_fold_checksum (op0)", then just trace down till the
14715 outputs differ. */
14717 DEBUG_FUNCTION void
14718 debug_fold_checksum (const_tree t)
14720 int i;
14721 unsigned char checksum[16];
14722 struct md5_ctx ctx;
14723 hash_table <pointer_hash <tree_node> > ht;
14724 ht.create (32);
14726 md5_init_ctx (&ctx);
14727 fold_checksum_tree (t, &ctx, ht);
14728 md5_finish_ctx (&ctx, checksum);
14729 ht.empty ();
14731 for (i = 0; i < 16; i++)
14732 fprintf (stderr, "%d ", checksum[i]);
14734 fprintf (stderr, "\n");
14737 #endif
14739 /* Fold a unary tree expression with code CODE of type TYPE with an
14740 operand OP0. LOC is the location of the resulting expression.
14741 Return a folded expression if successful. Otherwise, return a tree
14742 expression with code CODE of type TYPE with an operand OP0. */
14744 tree
14745 fold_build1_stat_loc (location_t loc,
14746 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14748 tree tem;
14749 #ifdef ENABLE_FOLD_CHECKING
14750 unsigned char checksum_before[16], checksum_after[16];
14751 struct md5_ctx ctx;
14752 hash_table <pointer_hash <tree_node> > ht;
14754 ht.create (32);
14755 md5_init_ctx (&ctx);
14756 fold_checksum_tree (op0, &ctx, ht);
14757 md5_finish_ctx (&ctx, checksum_before);
14758 ht.empty ();
14759 #endif
14761 tem = fold_unary_loc (loc, code, type, op0);
14762 if (!tem)
14763 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14765 #ifdef ENABLE_FOLD_CHECKING
14766 md5_init_ctx (&ctx);
14767 fold_checksum_tree (op0, &ctx, ht);
14768 md5_finish_ctx (&ctx, checksum_after);
14769 ht.dispose ();
14771 if (memcmp (checksum_before, checksum_after, 16))
14772 fold_check_failed (op0, tem);
14773 #endif
14774 return tem;
14777 /* Fold a binary tree expression with code CODE of type TYPE with
14778 operands OP0 and OP1. LOC is the location of the resulting
14779 expression. Return a folded expression if successful. Otherwise,
14780 return a tree expression with code CODE of type TYPE with operands
14781 OP0 and OP1. */
14783 tree
14784 fold_build2_stat_loc (location_t loc,
14785 enum tree_code code, tree type, tree op0, tree op1
14786 MEM_STAT_DECL)
14788 tree tem;
14789 #ifdef ENABLE_FOLD_CHECKING
14790 unsigned char checksum_before_op0[16],
14791 checksum_before_op1[16],
14792 checksum_after_op0[16],
14793 checksum_after_op1[16];
14794 struct md5_ctx ctx;
14795 hash_table <pointer_hash <tree_node> > ht;
14797 ht.create (32);
14798 md5_init_ctx (&ctx);
14799 fold_checksum_tree (op0, &ctx, ht);
14800 md5_finish_ctx (&ctx, checksum_before_op0);
14801 ht.empty ();
14803 md5_init_ctx (&ctx);
14804 fold_checksum_tree (op1, &ctx, ht);
14805 md5_finish_ctx (&ctx, checksum_before_op1);
14806 ht.empty ();
14807 #endif
14809 tem = fold_binary_loc (loc, code, type, op0, op1);
14810 if (!tem)
14811 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14813 #ifdef ENABLE_FOLD_CHECKING
14814 md5_init_ctx (&ctx);
14815 fold_checksum_tree (op0, &ctx, ht);
14816 md5_finish_ctx (&ctx, checksum_after_op0);
14817 ht.empty ();
14819 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14820 fold_check_failed (op0, tem);
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op1, &ctx, ht);
14824 md5_finish_ctx (&ctx, checksum_after_op1);
14825 ht.dispose ();
14827 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14828 fold_check_failed (op1, tem);
14829 #endif
14830 return tem;
14833 /* Fold a ternary tree expression with code CODE of type TYPE with
14834 operands OP0, OP1, and OP2. Return a folded expression if
14835 successful. Otherwise, return a tree expression with code CODE of
14836 type TYPE with operands OP0, OP1, and OP2. */
14838 tree
14839 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14840 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14842 tree tem;
14843 #ifdef ENABLE_FOLD_CHECKING
14844 unsigned char checksum_before_op0[16],
14845 checksum_before_op1[16],
14846 checksum_before_op2[16],
14847 checksum_after_op0[16],
14848 checksum_after_op1[16],
14849 checksum_after_op2[16];
14850 struct md5_ctx ctx;
14851 hash_table <pointer_hash <tree_node> > ht;
14853 ht.create (32);
14854 md5_init_ctx (&ctx);
14855 fold_checksum_tree (op0, &ctx, ht);
14856 md5_finish_ctx (&ctx, checksum_before_op0);
14857 ht.empty ();
14859 md5_init_ctx (&ctx);
14860 fold_checksum_tree (op1, &ctx, ht);
14861 md5_finish_ctx (&ctx, checksum_before_op1);
14862 ht.empty ();
14864 md5_init_ctx (&ctx);
14865 fold_checksum_tree (op2, &ctx, ht);
14866 md5_finish_ctx (&ctx, checksum_before_op2);
14867 ht.empty ();
14868 #endif
14870 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14871 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14872 if (!tem)
14873 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14875 #ifdef ENABLE_FOLD_CHECKING
14876 md5_init_ctx (&ctx);
14877 fold_checksum_tree (op0, &ctx, ht);
14878 md5_finish_ctx (&ctx, checksum_after_op0);
14879 ht.empty ();
14881 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14882 fold_check_failed (op0, tem);
14884 md5_init_ctx (&ctx);
14885 fold_checksum_tree (op1, &ctx, ht);
14886 md5_finish_ctx (&ctx, checksum_after_op1);
14887 ht.empty ();
14889 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14890 fold_check_failed (op1, tem);
14892 md5_init_ctx (&ctx);
14893 fold_checksum_tree (op2, &ctx, ht);
14894 md5_finish_ctx (&ctx, checksum_after_op2);
14895 ht.dispose ();
14897 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14898 fold_check_failed (op2, tem);
14899 #endif
14900 return tem;
14903 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14904 arguments in ARGARRAY, and a null static chain.
14905 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14906 of type TYPE from the given operands as constructed by build_call_array. */
14908 tree
14909 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14910 int nargs, tree *argarray)
14912 tree tem;
14913 #ifdef ENABLE_FOLD_CHECKING
14914 unsigned char checksum_before_fn[16],
14915 checksum_before_arglist[16],
14916 checksum_after_fn[16],
14917 checksum_after_arglist[16];
14918 struct md5_ctx ctx;
14919 hash_table <pointer_hash <tree_node> > ht;
14920 int i;
14922 ht.create (32);
14923 md5_init_ctx (&ctx);
14924 fold_checksum_tree (fn, &ctx, ht);
14925 md5_finish_ctx (&ctx, checksum_before_fn);
14926 ht.empty ();
14928 md5_init_ctx (&ctx);
14929 for (i = 0; i < nargs; i++)
14930 fold_checksum_tree (argarray[i], &ctx, ht);
14931 md5_finish_ctx (&ctx, checksum_before_arglist);
14932 ht.empty ();
14933 #endif
14935 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14937 #ifdef ENABLE_FOLD_CHECKING
14938 md5_init_ctx (&ctx);
14939 fold_checksum_tree (fn, &ctx, ht);
14940 md5_finish_ctx (&ctx, checksum_after_fn);
14941 ht.empty ();
14943 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14944 fold_check_failed (fn, tem);
14946 md5_init_ctx (&ctx);
14947 for (i = 0; i < nargs; i++)
14948 fold_checksum_tree (argarray[i], &ctx, ht);
14949 md5_finish_ctx (&ctx, checksum_after_arglist);
14950 ht.dispose ();
14952 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14953 fold_check_failed (NULL_TREE, tem);
14954 #endif
14955 return tem;
14958 /* Perform constant folding and related simplification of initializer
14959 expression EXPR. These behave identically to "fold_buildN" but ignore
14960 potential run-time traps and exceptions that fold must preserve. */
14962 #define START_FOLD_INIT \
14963 int saved_signaling_nans = flag_signaling_nans;\
14964 int saved_trapping_math = flag_trapping_math;\
14965 int saved_rounding_math = flag_rounding_math;\
14966 int saved_trapv = flag_trapv;\
14967 int saved_folding_initializer = folding_initializer;\
14968 flag_signaling_nans = 0;\
14969 flag_trapping_math = 0;\
14970 flag_rounding_math = 0;\
14971 flag_trapv = 0;\
14972 folding_initializer = 1;
14974 #define END_FOLD_INIT \
14975 flag_signaling_nans = saved_signaling_nans;\
14976 flag_trapping_math = saved_trapping_math;\
14977 flag_rounding_math = saved_rounding_math;\
14978 flag_trapv = saved_trapv;\
14979 folding_initializer = saved_folding_initializer;
14981 tree
14982 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14983 tree type, tree op)
14985 tree result;
14986 START_FOLD_INIT;
14988 result = fold_build1_loc (loc, code, type, op);
14990 END_FOLD_INIT;
14991 return result;
14994 tree
14995 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14996 tree type, tree op0, tree op1)
14998 tree result;
14999 START_FOLD_INIT;
15001 result = fold_build2_loc (loc, code, type, op0, op1);
15003 END_FOLD_INIT;
15004 return result;
15007 tree
15008 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15009 tree type, tree op0, tree op1, tree op2)
15011 tree result;
15012 START_FOLD_INIT;
15014 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15016 END_FOLD_INIT;
15017 return result;
15020 tree
15021 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15022 int nargs, tree *argarray)
15024 tree result;
15025 START_FOLD_INIT;
15027 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15029 END_FOLD_INIT;
15030 return result;
15033 #undef START_FOLD_INIT
15034 #undef END_FOLD_INIT
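/* For example (a hypothetical front-end use, with X and Y REAL_CST
   operands): folding the initializer "static double d = 1.0 / 3.0;" as

     init = fold_build2_initializer_loc (loc, RDIV_EXPR,
					 double_type_node, x, y);

   succeeds even under -frounding-math, where plain fold_build2_loc
   would refuse to fold the inexact division, because
   flag_rounding_math is cleared for the duration of the call.  */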
15036 /* Determine if the first argument is a multiple of the second. Return 0 if
15037 it is not, or if we cannot easily determine it to be.
15039 An example of the sort of thing we care about (at this point; this routine
15040 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15041 fold cases do now) is discovering that
15043 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15045 is a multiple of
15047 SAVE_EXPR (J * 8)
15049 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15051 This code also handles discovering that
15053 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15055 is a multiple of 8 so we don't have to worry about dealing with a
15056 possible remainder.
15058 Note that we *look* inside a SAVE_EXPR only to determine how it was
15059 calculated; it is not safe for fold to do much of anything else with the
15060 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15061 at run time. For example, the latter example above *cannot* be implemented
15062 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15063 evaluation time of the original SAVE_EXPR is not necessarily the same at
15064 the time the new expression is evaluated. The only optimization of this
15065 sort that would be valid is changing
15067 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15069 divided by 8 to
15071 SAVE_EXPR (I) * SAVE_EXPR (J)
15073 (where the same SAVE_EXPR (J) is used in the original and the
15074 transformed version). */
15076 static int
15077 multiple_of_p (tree type, const_tree top, const_tree bottom)
15079 if (operand_equal_p (top, bottom, 0))
15080 return 1;
15082 if (TREE_CODE (type) != INTEGER_TYPE)
15083 return 0;
15085 switch (TREE_CODE (top))
15087 case BIT_AND_EXPR:
15088 /* Bitwise and provides a power of two multiple. If the mask is
15089 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15090 if (!integer_pow2p (bottom))
15091 return 0;
15092 /* FALLTHRU */
15094 case MULT_EXPR:
15095 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15096 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15098 case PLUS_EXPR:
15099 case MINUS_EXPR:
15100 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15101 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15103 case LSHIFT_EXPR:
15104 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15106 tree op1, t1;
15108 op1 = TREE_OPERAND (top, 1);
15109 /* const_binop may not detect overflow correctly,
15110 so check for it explicitly here. */
15111 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15112 > TREE_INT_CST_LOW (op1)
15113 && TREE_INT_CST_HIGH (op1) == 0
15114 && 0 != (t1 = fold_convert (type,
15115 const_binop (LSHIFT_EXPR,
15116 size_one_node,
15117 op1)))
15118 && !TREE_OVERFLOW (t1))
15119 return multiple_of_p (type, t1, bottom);
15121 return 0;
15123 case NOP_EXPR:
15124 /* Can't handle conversions from non-integral or wider integral type. */
15125 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15126 || (TYPE_PRECISION (type)
15127 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15128 return 0;
15130 /* ... fall through ... */
15132 case SAVE_EXPR:
15133 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15135 case COND_EXPR:
15136 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15137 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15139 case INTEGER_CST:
15140 if (TREE_CODE (bottom) != INTEGER_CST
15141 || integer_zerop (bottom)
15142 || (TYPE_UNSIGNED (type)
15143 && (tree_int_cst_sgn (top) < 0
15144 || tree_int_cst_sgn (bottom) < 0)))
15145 return 0;
15146 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15147 top, bottom));
15149 default:
15150 return 0;
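/* Examples of what multiple_of_p recognizes (32-bit int assumed):
   (i & 12) is a multiple of 4 (BIT_AND_EXPR: the mask 12 is);
   (i << 3) is a multiple of 8 (LSHIFT_EXPR folds 1 << 3 to 8);
   i * 4 + (j << 2) is a multiple of 4 (PLUS_EXPR of two multiples).  */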
15154 /* Return true if CODE or TYPE is known to be non-negative. */
15156 static bool
15157 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15159 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15160 && truth_value_p (code))
15161 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15162 have a signed:1 type (where the values are -1 and 0). */
15163 return true;
15164 return false;
15167 /* Return true if (CODE OP0) is known to be non-negative. If the return
15168 value is based on the assumption that signed overflow is undefined,
15169 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15170 *STRICT_OVERFLOW_P. */
15172 bool
15173 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15174 bool *strict_overflow_p)
15176 if (TYPE_UNSIGNED (type))
15177 return true;
15179 switch (code)
15181 case ABS_EXPR:
15182 /* We can't return 1 if flag_wrapv is set because
15183 ABS_EXPR<INT_MIN> = INT_MIN. */
15184 if (!INTEGRAL_TYPE_P (type))
15185 return true;
15186 if (TYPE_OVERFLOW_UNDEFINED (type))
15188 *strict_overflow_p = true;
15189 return true;
15191 break;
15193 case NON_LVALUE_EXPR:
15194 case FLOAT_EXPR:
15195 case FIX_TRUNC_EXPR:
15196 return tree_expr_nonnegative_warnv_p (op0,
15197 strict_overflow_p);
15199 case NOP_EXPR:
15201 tree inner_type = TREE_TYPE (op0);
15202 tree outer_type = type;
15204 if (TREE_CODE (outer_type) == REAL_TYPE)
15206 if (TREE_CODE (inner_type) == REAL_TYPE)
15207 return tree_expr_nonnegative_warnv_p (op0,
15208 strict_overflow_p);
15209 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15211 if (TYPE_UNSIGNED (inner_type))
15212 return true;
15213 return tree_expr_nonnegative_warnv_p (op0,
15214 strict_overflow_p);
15217 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15219 if (TREE_CODE (inner_type) == REAL_TYPE)
15220 return tree_expr_nonnegative_warnv_p (op0,
15221 strict_overflow_p);
15222 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15223 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15224 && TYPE_UNSIGNED (inner_type);
15227 break;
15229 default:
15230 return tree_simple_nonnegative_warnv_p (code, type);
15233 /* We don't know the sign of `t', so be conservative and return false. */
15234 return false;
15237 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15238 value is based on the assumption that signed overflow is undefined,
15239 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15240 *STRICT_OVERFLOW_P. */
15242 bool
15243 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15244 tree op1, bool *strict_overflow_p)
15246 if (TYPE_UNSIGNED (type))
15247 return true;
15249 switch (code)
15251 case POINTER_PLUS_EXPR:
15252 case PLUS_EXPR:
15253 if (FLOAT_TYPE_P (type))
15254 return (tree_expr_nonnegative_warnv_p (op0,
15255 strict_overflow_p)
15256 && tree_expr_nonnegative_warnv_p (op1,
15257 strict_overflow_p));
15259 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15260 both unsigned and at least 2 bits shorter than the result. */
15261 if (TREE_CODE (type) == INTEGER_TYPE
15262 && TREE_CODE (op0) == NOP_EXPR
15263 && TREE_CODE (op1) == NOP_EXPR)
15265 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15266 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15267 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15268 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15270 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15271 TYPE_PRECISION (inner2)) + 1;
15272 return prec < TYPE_PRECISION (type);
15275 break;
15277 case MULT_EXPR:
15278 if (FLOAT_TYPE_P (type))
15280 /* x * x for floating point x is always non-negative. */
15281 if (operand_equal_p (op0, op1, 0))
15282 return true;
15283 return (tree_expr_nonnegative_warnv_p (op0,
15284 strict_overflow_p)
15285 && tree_expr_nonnegative_warnv_p (op1,
15286 strict_overflow_p));
15289 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15290 both unsigned and their total bits is shorter than the result. */
15291 if (TREE_CODE (type) == INTEGER_TYPE
15292 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15293 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15295 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15296 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15297 : TREE_TYPE (op0);
15298 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15299 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15300 : TREE_TYPE (op1);
15302 bool unsigned0 = TYPE_UNSIGNED (inner0);
15303 bool unsigned1 = TYPE_UNSIGNED (inner1);
15305 if (TREE_CODE (op0) == INTEGER_CST)
15306 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15308 if (TREE_CODE (op1) == INTEGER_CST)
15309 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15311 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15312 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15314 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15315 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15316 : TYPE_PRECISION (inner0);
15318 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15319 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15320 : TYPE_PRECISION (inner1);
15322 return precision0 + precision1 < TYPE_PRECISION (type);
15325 return false;
15327 case BIT_AND_EXPR:
15328 case MAX_EXPR:
15329 return (tree_expr_nonnegative_warnv_p (op0,
15330 strict_overflow_p)
15331 || tree_expr_nonnegative_warnv_p (op1,
15332 strict_overflow_p));
15334 case BIT_IOR_EXPR:
15335 case BIT_XOR_EXPR:
15336 case MIN_EXPR:
15337 case RDIV_EXPR:
15338 case TRUNC_DIV_EXPR:
15339 case CEIL_DIV_EXPR:
15340 case FLOOR_DIV_EXPR:
15341 case ROUND_DIV_EXPR:
15342 return (tree_expr_nonnegative_warnv_p (op0,
15343 strict_overflow_p)
15344 && tree_expr_nonnegative_warnv_p (op1,
15345 strict_overflow_p));
15347 case TRUNC_MOD_EXPR:
15348 case CEIL_MOD_EXPR:
15349 case FLOOR_MOD_EXPR:
15350 case ROUND_MOD_EXPR:
15351 return tree_expr_nonnegative_warnv_p (op0,
15352 strict_overflow_p);
15353 default:
15354 return tree_simple_nonnegative_warnv_p (code, type);
15357 /* We don't know the sign of `t', so be conservative and return false. */
15358 return false;
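/* Worked instance of the zero_extend rules above (32-bit int): for
   (int) (unsigned char) a + (int) (unsigned char) b the inner
   precisions give prec = MAX (8, 8) + 1 = 9 < 32, so the sum is known
   non-negative; for (int) (unsigned short) a * (int) (unsigned char) b
   we get 16 + 8 = 24 < 32, so the product is as well.  */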
15361 /* Return true if T is known to be non-negative. If the return
15362 value is based on the assumption that signed overflow is undefined,
15363 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15364 *STRICT_OVERFLOW_P. */
15366 bool
15367 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15369 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15370 return true;
15372 switch (TREE_CODE (t))
15374 case INTEGER_CST:
15375 return tree_int_cst_sgn (t) >= 0;
15377 case REAL_CST:
15378 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15380 case FIXED_CST:
15381 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15383 case COND_EXPR:
15384 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15385 strict_overflow_p)
15386 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15387 strict_overflow_p));
15388 default:
15389 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15390 TREE_TYPE (t));
15392 /* We don't know the sign of `t', so be conservative and return false. */
15393 return false;
15396 /* Return true if T is known to be non-negative. If the return
15397 value is based on the assumption that signed overflow is undefined,
15398 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15399 *STRICT_OVERFLOW_P. */
15401 bool
15402 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15403 tree arg0, tree arg1, bool *strict_overflow_p)
15405 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15406 switch (DECL_FUNCTION_CODE (fndecl))
15408 CASE_FLT_FN (BUILT_IN_ACOS):
15409 CASE_FLT_FN (BUILT_IN_ACOSH):
15410 CASE_FLT_FN (BUILT_IN_CABS):
15411 CASE_FLT_FN (BUILT_IN_COSH):
15412 CASE_FLT_FN (BUILT_IN_ERFC):
15413 CASE_FLT_FN (BUILT_IN_EXP):
15414 CASE_FLT_FN (BUILT_IN_EXP10):
15415 CASE_FLT_FN (BUILT_IN_EXP2):
15416 CASE_FLT_FN (BUILT_IN_FABS):
15417 CASE_FLT_FN (BUILT_IN_FDIM):
15418 CASE_FLT_FN (BUILT_IN_HYPOT):
15419 CASE_FLT_FN (BUILT_IN_POW10):
15420 CASE_INT_FN (BUILT_IN_FFS):
15421 CASE_INT_FN (BUILT_IN_PARITY):
15422 CASE_INT_FN (BUILT_IN_POPCOUNT):
15423 case BUILT_IN_BSWAP32:
15424 case BUILT_IN_BSWAP64:
15425 /* Always true. */
15426 return true;
15428 CASE_FLT_FN (BUILT_IN_SQRT):
15429 /* sqrt(-0.0) is -0.0. */
15430 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15431 return true;
15432 return tree_expr_nonnegative_warnv_p (arg0,
15433 strict_overflow_p);
15435 CASE_FLT_FN (BUILT_IN_ASINH):
15436 CASE_FLT_FN (BUILT_IN_ATAN):
15437 CASE_FLT_FN (BUILT_IN_ATANH):
15438 CASE_FLT_FN (BUILT_IN_CBRT):
15439 CASE_FLT_FN (BUILT_IN_CEIL):
15440 CASE_FLT_FN (BUILT_IN_ERF):
15441 CASE_FLT_FN (BUILT_IN_EXPM1):
15442 CASE_FLT_FN (BUILT_IN_FLOOR):
15443 CASE_FLT_FN (BUILT_IN_FMOD):
15444 CASE_FLT_FN (BUILT_IN_FREXP):
15445 CASE_FLT_FN (BUILT_IN_ICEIL):
15446 CASE_FLT_FN (BUILT_IN_IFLOOR):
15447 CASE_FLT_FN (BUILT_IN_IRINT):
15448 CASE_FLT_FN (BUILT_IN_IROUND):
15449 CASE_FLT_FN (BUILT_IN_LCEIL):
15450 CASE_FLT_FN (BUILT_IN_LDEXP):
15451 CASE_FLT_FN (BUILT_IN_LFLOOR):
15452 CASE_FLT_FN (BUILT_IN_LLCEIL):
15453 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15454 CASE_FLT_FN (BUILT_IN_LLRINT):
15455 CASE_FLT_FN (BUILT_IN_LLROUND):
15456 CASE_FLT_FN (BUILT_IN_LRINT):
15457 CASE_FLT_FN (BUILT_IN_LROUND):
15458 CASE_FLT_FN (BUILT_IN_MODF):
15459 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15460 CASE_FLT_FN (BUILT_IN_RINT):
15461 CASE_FLT_FN (BUILT_IN_ROUND):
15462 CASE_FLT_FN (BUILT_IN_SCALB):
15463 CASE_FLT_FN (BUILT_IN_SCALBLN):
15464 CASE_FLT_FN (BUILT_IN_SCALBN):
15465 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15466 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15467 CASE_FLT_FN (BUILT_IN_SINH):
15468 CASE_FLT_FN (BUILT_IN_TANH):
15469 CASE_FLT_FN (BUILT_IN_TRUNC):
15470 /* True if the 1st argument is nonnegative. */
15471 return tree_expr_nonnegative_warnv_p (arg0,
15472 strict_overflow_p);
15474 CASE_FLT_FN (BUILT_IN_FMAX):
15475 /* True if the 1st OR 2nd arguments are nonnegative. */
15476 return (tree_expr_nonnegative_warnv_p (arg0,
15477 strict_overflow_p)
15478 || (tree_expr_nonnegative_warnv_p (arg1,
15479 strict_overflow_p)));
15481 CASE_FLT_FN (BUILT_IN_FMIN):
15482 /* True if the 1st AND 2nd arguments are nonnegative. */
15483 return (tree_expr_nonnegative_warnv_p (arg0,
15484 strict_overflow_p)
15485 && (tree_expr_nonnegative_warnv_p (arg1,
15486 strict_overflow_p)));
15488 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15489 /* True if the 2nd argument is nonnegative. */
15490 return tree_expr_nonnegative_warnv_p (arg1,
15491 strict_overflow_p);
15493 CASE_FLT_FN (BUILT_IN_POWI):
15494 /* True if the 1st argument is nonnegative or the second
15495 argument is an even integer. */
15496 if (TREE_CODE (arg1) == INTEGER_CST
15497 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15498 return true;
15499 return tree_expr_nonnegative_warnv_p (arg0,
15500 strict_overflow_p);
15502 CASE_FLT_FN (BUILT_IN_POW):
15503 /* True if the 1st argument is nonnegative or the second
15504 argument is an even integer valued real. */
15505 if (TREE_CODE (arg1) == REAL_CST)
15507 REAL_VALUE_TYPE c;
15508 HOST_WIDE_INT n;
15510 c = TREE_REAL_CST (arg1);
15511 n = real_to_integer (&c);
15512 if ((n & 1) == 0)
15514 REAL_VALUE_TYPE cint;
15515 real_from_integer (&cint, VOIDmode, n,
15516 n < 0 ? -1 : 0, 0);
15517 if (real_identical (&c, &cint))
15518 return true;
15521 return tree_expr_nonnegative_warnv_p (arg0,
15522 strict_overflow_p);
15524 default:
15525 break;
15527 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15528 type);
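/* E.g. pow (x, 2.0) is known non-negative for any X, since 2.0 is an
   even integer valued REAL_CST and the (n & 1) == 0 path applies;
   pow (x, 3.0) instead falls back to requiring X itself to be
   non-negative.  */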
15531 /* Return true if T is known to be non-negative. If the return
15532 value is based on the assumption that signed overflow is undefined,
15533 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15534 *STRICT_OVERFLOW_P. */
15536 bool
15537 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15539 enum tree_code code = TREE_CODE (t);
15540 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15541 return true;
15543 switch (code)
15545 case TARGET_EXPR:
15547 tree temp = TARGET_EXPR_SLOT (t);
15548 t = TARGET_EXPR_INITIAL (t);
15550 /* If the initializer is non-void, then it's a normal expression
15551 that will be assigned to the slot. */
15552 if (!VOID_TYPE_P (t))
15553 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15555 /* Otherwise, the initializer sets the slot in some way. One common
15556 way is an assignment statement at the end of the initializer. */
15557 while (1)
15559 if (TREE_CODE (t) == BIND_EXPR)
15560 t = expr_last (BIND_EXPR_BODY (t));
15561 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15562 || TREE_CODE (t) == TRY_CATCH_EXPR)
15563 t = expr_last (TREE_OPERAND (t, 0));
15564 else if (TREE_CODE (t) == STATEMENT_LIST)
15565 t = expr_last (t);
15566 else
15567 break;
15569 if (TREE_CODE (t) == MODIFY_EXPR
15570 && TREE_OPERAND (t, 0) == temp)
15571 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15572 strict_overflow_p);
15574 return false;
15577 case CALL_EXPR:
15579 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15580 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15582 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15583 get_callee_fndecl (t),
15584 arg0,
15585 arg1,
15586 strict_overflow_p);
15588 case COMPOUND_EXPR:
15589 case MODIFY_EXPR:
15590 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15591 strict_overflow_p);
15592 case BIND_EXPR:
15593 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15594 strict_overflow_p);
15595 case SAVE_EXPR:
15596 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15597 strict_overflow_p);
15599 default:
15600 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15601 TREE_TYPE (t));
15604 /* We don't know the sign of `t', so be conservative and return false. */
15605 return false;
15608 /* Return true if T is known to be non-negative. If the return
15609 value is based on the assumption that signed overflow is undefined,
15610 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15611 *STRICT_OVERFLOW_P. */
15613 bool
15614 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15616 enum tree_code code;
15617 if (t == error_mark_node)
15618 return false;
15620 code = TREE_CODE (t);
15621 switch (TREE_CODE_CLASS (code))
15623 case tcc_binary:
15624 case tcc_comparison:
15625 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15626 TREE_TYPE (t),
15627 TREE_OPERAND (t, 0),
15628 TREE_OPERAND (t, 1),
15629 strict_overflow_p);
15631 case tcc_unary:
15632 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15633 TREE_TYPE (t),
15634 TREE_OPERAND (t, 0),
15635 strict_overflow_p);
15637 case tcc_constant:
15638 case tcc_declaration:
15639 case tcc_reference:
15640 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15642 default:
15643 break;
15646 switch (code)
15648 case TRUTH_AND_EXPR:
15649 case TRUTH_OR_EXPR:
15650 case TRUTH_XOR_EXPR:
15651 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15652 TREE_TYPE (t),
15653 TREE_OPERAND (t, 0),
15654 TREE_OPERAND (t, 1),
15655 strict_overflow_p);
15656 case TRUTH_NOT_EXPR:
15657 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15658 TREE_TYPE (t),
15659 TREE_OPERAND (t, 0),
15660 strict_overflow_p);
15662 case COND_EXPR:
15663 case CONSTRUCTOR:
15664 case OBJ_TYPE_REF:
15665 case ASSERT_EXPR:
15666 case ADDR_EXPR:
15667 case WITH_SIZE_EXPR:
15668 case SSA_NAME:
15669 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15671 default:
15672 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15676 /* Return true if `t' is known to be non-negative. Handle warnings
15677 about undefined signed overflow. */
15679 bool
15680 tree_expr_nonnegative_p (tree t)
15682 bool ret, strict_overflow_p;
15684 strict_overflow_p = false;
15685 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15686 if (strict_overflow_p)
15687 fold_overflow_warning (("assuming signed overflow does not occur when "
15688 "determining that expression is always "
15689 "non-negative"),
15690 WARN_STRICT_OVERFLOW_MISC);
15691 return ret;
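/* For example, with T = ABS_EXPR <x> for signed X this returns true,
   but only by assuming -fstrict-overflow semantics (ABS_EXPR <INT_MIN>
   would be INT_MIN under -fwrapv), so the warning above fires when
   -Wstrict-overflow is enabled.  */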
15695 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15696 For floating point we further ensure that T is not denormal.
15697 Similar logic is present in nonzero_address in rtlanal.c.
15699 If the return value is based on the assumption that signed overflow
15700 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15701 change *STRICT_OVERFLOW_P. */
15703 bool
15704 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15705 bool *strict_overflow_p)
15707 switch (code)
15709 case ABS_EXPR:
15710 return tree_expr_nonzero_warnv_p (op0,
15711 strict_overflow_p);
15713 case NOP_EXPR:
15715 tree inner_type = TREE_TYPE (op0);
15716 tree outer_type = type;
15718 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15719 && tree_expr_nonzero_warnv_p (op0,
15720 strict_overflow_p));
15722 break;
15724 case NON_LVALUE_EXPR:
15725 return tree_expr_nonzero_warnv_p (op0,
15726 strict_overflow_p);
15728 default:
15729 break;
15732 return false;
15735 /* Return true when the binary expression (CODE OP0 OP1) is known to be nonzero.
15736 For floating point we further ensure that the result is not denormal.
15737 Similar logic is present in nonzero_address in rtlanal.c.
15739 If the return value is based on the assumption that signed overflow
15740 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15741 change *STRICT_OVERFLOW_P. */
15743 bool
15744 tree_binary_nonzero_warnv_p (enum tree_code code,
15745 tree type,
15746 tree op0,
15747 tree op1, bool *strict_overflow_p)
15749 bool sub_strict_overflow_p;
15750 switch (code)
15752 case POINTER_PLUS_EXPR:
15753 case PLUS_EXPR:
15754 if (TYPE_OVERFLOW_UNDEFINED (type))
15756 /* In the presence of negative values it is hard
15757 to say anything definite. */
15758 sub_strict_overflow_p = false;
15759 if (!tree_expr_nonnegative_warnv_p (op0,
15760 &sub_strict_overflow_p)
15761 || !tree_expr_nonnegative_warnv_p (op1,
15762 &sub_strict_overflow_p))
15763 return false;
15764 /* For the sum to be nonzero, one operand must be positive and the other non-negative. */
15765 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15766 overflows, on a two's-complement machine the sum of two nonnegative
15767 numbers, at least one of them nonzero, can never be zero; see the standalone check sketched after this function. */
15768 return (tree_expr_nonzero_warnv_p (op0,
15769 strict_overflow_p)
15770 || tree_expr_nonzero_warnv_p (op1,
15771 strict_overflow_p));
15773 break;
15775 case MULT_EXPR:
15776 if (TYPE_OVERFLOW_UNDEFINED (type))
15778 if (tree_expr_nonzero_warnv_p (op0,
15779 strict_overflow_p)
15780 && tree_expr_nonzero_warnv_p (op1,
15781 strict_overflow_p))
15783 *strict_overflow_p = true;
15784 return true;
15787 break;
15789 case MIN_EXPR:
15790 sub_strict_overflow_p = false;
15791 if (tree_expr_nonzero_warnv_p (op0,
15792 &sub_strict_overflow_p)
15793 && tree_expr_nonzero_warnv_p (op1,
15794 &sub_strict_overflow_p))
15796 if (sub_strict_overflow_p)
15797 *strict_overflow_p = true;
15799 break;
15801 case MAX_EXPR:
15802 sub_strict_overflow_p = false;
15803 if (tree_expr_nonzero_warnv_p (op0,
15804 &sub_strict_overflow_p))
15806 if (sub_strict_overflow_p)
15807 *strict_overflow_p = true;
15809 /* When both operands are nonzero, MAX must be too. */
15810 if (tree_expr_nonzero_warnv_p (op1,
15811 strict_overflow_p))
15812 return true;
15814 /* MAX where operand 0 is positive is positive. */
15815 return tree_expr_nonnegative_warnv_p (op0,
15816 strict_overflow_p);
15818 /* MAX where operand 1 is positive is positive. */
15819 else if (tree_expr_nonzero_warnv_p (op1,
15820 &sub_strict_overflow_p)
15821 && tree_expr_nonnegative_warnv_p (op1,
15822 &sub_strict_overflow_p))
15824 if (sub_strict_overflow_p)
15825 *strict_overflow_p = true;
15826 return true;
15828 break;
15830 case BIT_IOR_EXPR:
15831 return (tree_expr_nonzero_warnv_p (op1,
15832 strict_overflow_p)
15833 || tree_expr_nonzero_warnv_p (op0,
15834 strict_overflow_p));
15836 default:
15837 break;
15840 return false;
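/* A standalone, exhaustive check (not part of this file; illustrative
   example code) of the two's-complement claim made in the PLUS_EXPR case
   above: if both operands are non-negative and at least one is nonzero,
   their sum can never wrap around to zero.  Verified here for every pair
   of non-negative 8-bit values.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned a = 0; a <= 127; a++)      /* non-negative int8 range */
    for (unsigned b = 0; b <= 127; b++)
      if (a != 0 || b != 0)
        assert ((uint8_t) (a + b) != 0);   /* sum mod 256 stays nonzero */
  return 0;
}
#endif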
15843 /* Return true when the single expression T is known to be nonzero.
15844 For floating point we further ensure that T is not denormal.
15845 Similar logic is present in nonzero_address in rtlanal.c.
15847 If the return value is based on the assumption that signed overflow
15848 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15849 change *STRICT_OVERFLOW_P. */
15851 bool
15852 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15854 bool sub_strict_overflow_p;
15855 switch (TREE_CODE (t))
15857 case INTEGER_CST:
15858 return !integer_zerop (t);
15860 case ADDR_EXPR:
15862 tree base = TREE_OPERAND (t, 0);
15863 if (!DECL_P (base))
15864 base = get_base_address (base);
15866 if (!base)
15867 return false;
15869 /* Weak declarations may link to NULL. Other things may also be NULL,
15870 so protect with -fdelete-null-pointer-checks; variables allocated
15871 on the stack, however, can never be NULL. */
15872 if (DECL_P (base)
15873 && (flag_delete_null_pointer_checks
15874 || (DECL_CONTEXT (base)
15875 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15876 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15877 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15879 /* Constants are never weak. */
15880 if (CONSTANT_CLASS_P (base))
15881 return true;
15883 return false;
15886 case COND_EXPR:
15887 sub_strict_overflow_p = false;
15888 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15889 &sub_strict_overflow_p)
15890 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15891 &sub_strict_overflow_p))
15893 if (sub_strict_overflow_p)
15894 *strict_overflow_p = true;
15895 return true;
15897 break;
15899 default:
15900 break;
15902 return false;
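/* A standalone sketch (not part of this file; hypothetical example code)
   of why DECL_WEAK is checked in the ADDR_EXPR case above: the address
   of a weak declaration may legitimately be NULL when no definition is
   linked in, so &foo cannot be folded to "known nonzero".  */
#if 0
extern int foo __attribute__ ((weak));

int
foo_is_present (void)
{
  /* This test must survive folding; for a non-weak declaration the
     comparison could be folded to 1 under
     -fdelete-null-pointer-checks.  */
  return &foo != 0;
}
#endif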
15905 /* Return true when T is known to be nonzero.
15906 For floating point we further ensure that T is not denormal.
15907 Similar logic is present in nonzero_address in rtlanal.c.
15909 If the return value is based on the assumption that signed overflow
15910 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15911 change *STRICT_OVERFLOW_P. */
15913 bool
15914 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15916 tree type = TREE_TYPE (t);
15917 enum tree_code code;
15919 /* Doing something useful for floating point would need more work. */
15920 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15921 return false;
15923 code = TREE_CODE (t);
15924 switch (TREE_CODE_CLASS (code))
15926 case tcc_unary:
15927 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15928 strict_overflow_p);
15929 case tcc_binary:
15930 case tcc_comparison:
15931 return tree_binary_nonzero_warnv_p (code, type,
15932 TREE_OPERAND (t, 0),
15933 TREE_OPERAND (t, 1),
15934 strict_overflow_p);
15935 case tcc_constant:
15936 case tcc_declaration:
15937 case tcc_reference:
15938 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15940 default:
15941 break;
15944 switch (code)
15946 case TRUTH_NOT_EXPR:
15947 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15948 strict_overflow_p);
15950 case TRUTH_AND_EXPR:
15951 case TRUTH_OR_EXPR:
15952 case TRUTH_XOR_EXPR:
15953 return tree_binary_nonzero_warnv_p (code, type,
15954 TREE_OPERAND (t, 0),
15955 TREE_OPERAND (t, 1),
15956 strict_overflow_p);
15958 case COND_EXPR:
15959 case CONSTRUCTOR:
15960 case OBJ_TYPE_REF:
15961 case ASSERT_EXPR:
15962 case ADDR_EXPR:
15963 case WITH_SIZE_EXPR:
15964 case SSA_NAME:
15965 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15967 case COMPOUND_EXPR:
15968 case MODIFY_EXPR:
15969 case BIND_EXPR:
15970 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15971 strict_overflow_p);
15973 case SAVE_EXPR:
15974 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15975 strict_overflow_p);
15977 case CALL_EXPR:
15978 return alloca_call_p (t);
15980 default:
15981 break;
15983 return false;
15986 /* Return true when T is known to be nonzero.
15987 Handle warnings about undefined signed overflow. */
15989 bool
15990 tree_expr_nonzero_p (tree t)
15992 bool ret, strict_overflow_p;
15994 strict_overflow_p = false;
15995 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15996 if (strict_overflow_p)
15997 fold_overflow_warning (("assuming signed overflow does not occur when "
15998 "determining that expression is always "
15999 "non-zero"),
16000 WARN_STRICT_OVERFLOW_MISC);
16001 return ret;
16004 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16005 attempt to fold the expression to a constant without modifying TYPE,
16006 OP0 or OP1.
16008 If the expression could be simplified to a constant, then return
16009 the constant. If the expression would not be simplified to a
16010 constant, then return NULL_TREE. */
16012 tree
16013 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16015 tree tem = fold_binary (code, type, op0, op1);
16016 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16019 /* Given the components of a unary expression CODE, TYPE and OP0,
16020 attempt to fold the expression to a constant without modifying
16021 TYPE or OP0.
16023 If the expression could be simplified to a constant, then return
16024 the constant. If the expression would not be simplified to a
16025 constant, then return NULL_TREE. */
16027 tree
16028 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16030 tree tem = fold_unary (code, type, op0);
16031 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
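/* A hypothetical caller of the two helpers above, sketched under the
   assumption that it runs inside GCC where integer_type_node,
   build_int_cst and gcc_assert are available; not part of this file.  */
#if 0
static void
fold_to_constant_example (void)
{
  tree lhs = build_int_cst (integer_type_node, 40);
  tree rhs = build_int_cst (integer_type_node, 2);
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      lhs, rhs);
  /* SUM is an INTEGER_CST with value 42; an input that does not fold
     to a constant would have produced NULL_TREE instead.  */
  gcc_assert (sum != NULL_TREE && TREE_INT_CST_LOW (sum) == 42);
}
#endif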
16034 /* If EXP represents referencing an element in a constant string
16035 (either via pointer arithmetic or array indexing), return the
16036 tree representing the value accessed, otherwise return NULL. */
16038 tree
16039 fold_read_from_constant_string (tree exp)
16041 if ((TREE_CODE (exp) == INDIRECT_REF
16042 || TREE_CODE (exp) == ARRAY_REF)
16043 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16045 tree exp1 = TREE_OPERAND (exp, 0);
16046 tree index;
16047 tree string;
16048 location_t loc = EXPR_LOCATION (exp);
16050 if (TREE_CODE (exp) == INDIRECT_REF)
16051 string = string_constant (exp1, &index);
16052 else
16054 tree low_bound = array_ref_low_bound (exp);
16055 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16057 /* Optimize the special case of a zero lower bound.
16059 We convert the low_bound to sizetype to avoid some problems
16060 with constant folding. (E.g. suppose the lower bound is 1,
16061 and its mode is QI. Without the conversion, (ARRAY
16062 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16063 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16064 if (! integer_zerop (low_bound))
16065 index = size_diffop_loc (loc, index,
16066 fold_convert_loc (loc, sizetype, low_bound));
16068 string = exp1;
16071 if (string
16072 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16073 && TREE_CODE (string) == STRING_CST
16074 && TREE_CODE (index) == INTEGER_CST
16075 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16076 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16077 == MODE_INT)
16078 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16079 return build_int_cst_type (TREE_TYPE (exp),
16080 (TREE_STRING_POINTER (string)
16081 [TREE_INT_CST_LOW (index)]));
16083 return NULL;
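/* A standalone model (not part of this file; illustrative example code)
   of the transformation performed above at the tree level: a read of a
   constant index out of a string literal is replaced by the character
   value itself, provided the index lies within the string (including
   its terminating NUL).  */
#if 0
#include <assert.h>
#include <stddef.h>

static int
read_constant_string (const char *str, size_t len, size_t index)
{
  return index < len ? (unsigned char) str[index] : -1;  /* -1: no fold */
}

int
main (void)
{
  assert (read_constant_string ("hello", sizeof "hello", 1) == 'e');
  assert (read_constant_string ("hello", sizeof "hello", 9) == -1);
  return 0;
}
#endif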
16086 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16087 an integer, real, or fixed-point constant.
16089 TYPE is the type of the result. */
16091 static tree
16092 fold_negate_const (tree arg0, tree type)
16094 tree t = NULL_TREE;
16096 switch (TREE_CODE (arg0))
16098 case INTEGER_CST:
16100 double_int val = tree_to_double_int (arg0);
16101 bool overflow;
16102 val = val.neg_with_overflow (&overflow);
16103 t = force_fit_type_double (type, val, 1,
16104 (overflow | TREE_OVERFLOW (arg0))
16105 && !TYPE_UNSIGNED (type));
16106 break;
16109 case REAL_CST:
16110 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16111 break;
16113 case FIXED_CST:
16115 FIXED_VALUE_TYPE f;
16116 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16117 &(TREE_FIXED_CST (arg0)), NULL,
16118 TYPE_SATURATING (type));
16119 t = build_fixed (type, f);
16120 /* Propagate overflow flags. */
16121 if (overflow_p | TREE_OVERFLOW (arg0))
16122 TREE_OVERFLOW (t) = 1;
16123 break;
16126 default:
16127 gcc_unreachable ();
16130 return t;
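/* A standalone illustration (not part of this file; illustrative example
   code) of the overflow the INTEGER_CST case above must record: in two's
   complement, negating the most negative value yields the value itself,
   so the folded result carries TREE_OVERFLOW for signed types.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  /* Simulate the wrapping negation without invoking undefined behavior
     on -INT_MIN; the conversion back to int follows GCC's semantics.  */
  int neg_min = (int) (0u - (unsigned) INT_MIN);
  assert (neg_min == INT_MIN);  /* negation overflowed back to itself */
  return 0;
}
#endif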
16133 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16134 an integer constant or real constant.
16136 TYPE is the type of the result. */
16138 tree
16139 fold_abs_const (tree arg0, tree type)
16141 tree t = NULL_TREE;
16143 switch (TREE_CODE (arg0))
16145 case INTEGER_CST:
16147 double_int val = tree_to_double_int (arg0);
16149 /* If the value is unsigned or non-negative, then the absolute value
16150 is the same as the ordinary value. */
16151 if (TYPE_UNSIGNED (type)
16152 || !val.is_negative ())
16153 t = arg0;
16155 /* If the value is negative, then the absolute value is
16156 its negation. */
16157 else
16159 bool overflow;
16160 val = val.neg_with_overflow (&overflow);
16161 t = force_fit_type_double (type, val, -1,
16162 overflow | TREE_OVERFLOW (arg0));
16165 break;
16167 case REAL_CST:
16168 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16169 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16170 else
16171 t = arg0;
16172 break;
16174 default:
16175 gcc_unreachable ();
16178 return t;
16181 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16182 constant. TYPE is the type of the result. */
16184 static tree
16185 fold_not_const (const_tree arg0, tree type)
16187 double_int val;
16189 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16191 val = ~tree_to_double_int (arg0);
16192 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16195 /* Given CODE, a relational operator, the target type, TYPE and two
16196 constant operands OP0 and OP1, return the result of the
16197 relational operation. If the result is not a compile time
16198 constant, then return NULL_TREE. */
16200 static tree
16201 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16203 int result, invert;
16205 /* From here on, the only cases we handle are when the result is
16206 known to be a constant. */
16208 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16210 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16211 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16213 /* Handle the cases where either operand is a NaN. */
16214 if (real_isnan (c0) || real_isnan (c1))
16216 switch (code)
16218 case EQ_EXPR:
16219 case ORDERED_EXPR:
16220 result = 0;
16221 break;
16223 case NE_EXPR:
16224 case UNORDERED_EXPR:
16225 case UNLT_EXPR:
16226 case UNLE_EXPR:
16227 case UNGT_EXPR:
16228 case UNGE_EXPR:
16229 case UNEQ_EXPR:
16230 result = 1;
16231 break;
16233 case LT_EXPR:
16234 case LE_EXPR:
16235 case GT_EXPR:
16236 case GE_EXPR:
16237 case LTGT_EXPR:
16238 if (flag_trapping_math)
16239 return NULL_TREE;
16240 result = 0;
16241 break;
16243 default:
16244 gcc_unreachable ();
16247 return constant_boolean_node (result, type);
16250 return constant_boolean_node (real_compare (code, c0, c1), type);
16253 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16255 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16256 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16257 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16260 /* Handle equality/inequality of complex constants. */
16261 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16263 tree rcond = fold_relational_const (code, type,
16264 TREE_REALPART (op0),
16265 TREE_REALPART (op1));
16266 tree icond = fold_relational_const (code, type,
16267 TREE_IMAGPART (op0),
16268 TREE_IMAGPART (op1));
16269 if (code == EQ_EXPR)
16270 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16271 else if (code == NE_EXPR)
16272 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16273 else
16274 return NULL_TREE;
16277 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16279 unsigned count = VECTOR_CST_NELTS (op0);
16280 tree *elts = XALLOCAVEC (tree, count);
16281 gcc_assert (VECTOR_CST_NELTS (op1) == count
16282 && TYPE_VECTOR_SUBPARTS (type) == count);
16284 for (unsigned i = 0; i < count; i++)
16286 tree elem_type = TREE_TYPE (type);
16287 tree elem0 = VECTOR_CST_ELT (op0, i);
16288 tree elem1 = VECTOR_CST_ELT (op1, i);
16290 tree tem = fold_relational_const (code, elem_type,
16291 elem0, elem1);
16293 if (tem == NULL_TREE)
16294 return NULL_TREE;
16296 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16299 return build_vector (type, elts);
16302 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16304 To compute GT, swap the arguments and do LT.
16305 To compute GE, do LT and invert the result.
16306 To compute LE, swap the arguments, do LT and invert the result.
16307 To compute NE, do EQ and invert the result.
16309 Therefore, the code below must handle only EQ and LT. */
16311 if (code == LE_EXPR || code == GT_EXPR)
16313 tree tem = op0;
16314 op0 = op1;
16315 op1 = tem;
16316 code = swap_tree_comparison (code);
16319 /* Note that it is safe to invert for real values here because we
16320 have already handled the one case where it matters: NaN operands. */
16322 invert = 0;
16323 if (code == NE_EXPR || code == GE_EXPR)
16325 invert = 1;
16326 code = invert_tree_comparison (code, false);
16329 /* Compute a result for LT or EQ if the arguments permit;
16330 otherwise return NULL_TREE. */
16331 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16333 if (code == EQ_EXPR)
16334 result = tree_int_cst_equal (op0, op1);
16335 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16336 result = INT_CST_LT_UNSIGNED (op0, op1);
16337 else
16338 result = INT_CST_LT (op0, op1);
16340 else
16341 return NULL_TREE;
16343 if (invert)
16344 result ^= 1;
16345 return constant_boolean_node (result, type);
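/* A standalone sketch (not part of this file; illustrative example code)
   of the swap/invert reduction documented above: all six integer
   comparisons can be computed from "less than" and "equal" alone.  */
#if 0
#include <assert.h>

static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }

int
main (void)
{
  int a = 3, b = 7;
  assert (lt (b, a) == (a > b));    /* GT: swap the arguments, do LT */
  assert (!lt (a, b) == (a >= b));  /* GE: do LT and invert */
  assert (!lt (b, a) == (a <= b));  /* LE: swap, do LT, invert */
  assert (!eq (a, b) == (a != b));  /* NE: do EQ and invert */
  return 0;
}
#endif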
16348 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16349 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16350 itself. */
16352 tree
16353 fold_build_cleanup_point_expr (tree type, tree expr)
16355 /* If the expression does not have side effects then we don't have to wrap
16356 it with a cleanup point expression. */
16357 if (!TREE_SIDE_EFFECTS (expr))
16358 return expr;
16360 /* If the expression is a RETURN_EXPR, check whether the expression inside
16361 the return, or the right-hand side of the MODIFY_EXPR inside the return,
16362 has side effects. If either has none, we don't need to wrap the
16363 expression in a CLEANUP_POINT_EXPR. Note we don't check the left-hand
16364 side of the MODIFY_EXPR because it should always be the return decl. */
16365 if (TREE_CODE (expr) == RETURN_EXPR)
16367 tree op = TREE_OPERAND (expr, 0);
16368 if (!op || !TREE_SIDE_EFFECTS (op))
16369 return expr;
16370 op = TREE_OPERAND (op, 1);
16371 if (!TREE_SIDE_EFFECTS (op))
16372 return expr;
16375 return build1 (CLEANUP_POINT_EXPR, type, expr);
16378 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16379 of an indirection through OP0, or NULL_TREE if no simplification is
16380 possible. */
16382 tree
16383 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16385 tree sub = op0;
16386 tree subtype;
16388 STRIP_NOPS (sub);
16389 subtype = TREE_TYPE (sub);
16390 if (!POINTER_TYPE_P (subtype))
16391 return NULL_TREE;
16393 if (TREE_CODE (sub) == ADDR_EXPR)
16395 tree op = TREE_OPERAND (sub, 0);
16396 tree optype = TREE_TYPE (op);
16397 /* *&CONST_DECL -> to the value of the const decl. */
16398 if (TREE_CODE (op) == CONST_DECL)
16399 return DECL_INITIAL (op);
16400 /* *&p => p; make sure to handle *&"str"[cst] here. */
16401 if (type == optype)
16403 tree fop = fold_read_from_constant_string (op);
16404 if (fop)
16405 return fop;
16406 else
16407 return op;
16409 /* *(foo *)&fooarray => fooarray[0] */
16410 else if (TREE_CODE (optype) == ARRAY_TYPE
16411 && type == TREE_TYPE (optype)
16412 && (!in_gimple_form
16413 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16415 tree type_domain = TYPE_DOMAIN (optype);
16416 tree min_val = size_zero_node;
16417 if (type_domain && TYPE_MIN_VALUE (type_domain))
16418 min_val = TYPE_MIN_VALUE (type_domain);
16419 if (in_gimple_form
16420 && TREE_CODE (min_val) != INTEGER_CST)
16421 return NULL_TREE;
16422 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16423 NULL_TREE, NULL_TREE);
16425 /* *(foo *)&complexfoo => __real__ complexfoo */
16426 else if (TREE_CODE (optype) == COMPLEX_TYPE
16427 && type == TREE_TYPE (optype))
16428 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16429 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16430 else if (TREE_CODE (optype) == VECTOR_TYPE
16431 && type == TREE_TYPE (optype))
16433 tree part_width = TYPE_SIZE (type);
16434 tree index = bitsize_int (0);
16435 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16439 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16440 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16442 tree op00 = TREE_OPERAND (sub, 0);
16443 tree op01 = TREE_OPERAND (sub, 1);
16445 STRIP_NOPS (op00);
16446 if (TREE_CODE (op00) == ADDR_EXPR)
16448 tree op00type;
16449 op00 = TREE_OPERAND (op00, 0);
16450 op00type = TREE_TYPE (op00);
16452 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16453 if (TREE_CODE (op00type) == VECTOR_TYPE
16454 && type == TREE_TYPE (op00type))
16456 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16457 tree part_width = TYPE_SIZE (type);
16458 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16459 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16460 tree index = bitsize_int (indexi);
16462 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16463 return fold_build3_loc (loc,
16464 BIT_FIELD_REF, type, op00,
16465 part_width, index);
16468 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16469 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16470 && type == TREE_TYPE (op00type))
16472 tree size = TYPE_SIZE_UNIT (type);
16473 if (tree_int_cst_equal (size, op01))
16474 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16476 /* ((foo *)&fooarray)[1] => fooarray[1] */
16477 else if (TREE_CODE (op00type) == ARRAY_TYPE
16478 && type == TREE_TYPE (op00type))
16480 tree type_domain = TYPE_DOMAIN (op00type);
16481 tree min_val = size_zero_node;
16482 if (type_domain && TYPE_MIN_VALUE (type_domain))
16483 min_val = TYPE_MIN_VALUE (type_domain);
16484 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16485 TYPE_SIZE_UNIT (type));
16486 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16487 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16488 NULL_TREE, NULL_TREE);
16493 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16494 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16495 && type == TREE_TYPE (TREE_TYPE (subtype))
16496 && (!in_gimple_form
16497 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16499 tree type_domain;
16500 tree min_val = size_zero_node;
16501 sub = build_fold_indirect_ref_loc (loc, sub);
16502 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16503 if (type_domain && TYPE_MIN_VALUE (type_domain))
16504 min_val = TYPE_MIN_VALUE (type_domain);
16505 if (in_gimple_form
16506 && TREE_CODE (min_val) != INTEGER_CST)
16507 return NULL_TREE;
16508 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16509 NULL_TREE);
16512 return NULL_TREE;
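/* Standalone source-level analogues (not part of this file; GNU C
   example code) of the folds implemented above; in each assertion the
   left-hand form is the input and the right-hand form is what the
   folder produces.  */
#if 0
#include <assert.h>

int
main (void)
{
  int arr[4] = { 1, 2, 3, 4 };
  _Complex double z = 5.0 + 6.0i;            /* GNU imaginary constant */

  assert (*(int *) &arr == arr[0]);          /* *(foo *)&fooarray => fooarray[0] */
  assert (*(double *) &z == __real__ z);     /* *(foo *)&complexfoo => __real__ */
  assert (((double *) &z)[1] == __imag__ z); /* one element in => __imag__ */
  return 0;
}
#endif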
16515 /* Builds an expression for an indirection through T, simplifying some
16516 cases. */
16518 tree
16519 build_fold_indirect_ref_loc (location_t loc, tree t)
16521 tree type = TREE_TYPE (TREE_TYPE (t));
16522 tree sub = fold_indirect_ref_1 (loc, type, t);
16524 if (sub)
16525 return sub;
16527 return build1_loc (loc, INDIRECT_REF, type, t);
16530 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16532 tree
16533 fold_indirect_ref_loc (location_t loc, tree t)
16535 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16537 if (sub)
16538 return sub;
16539 else
16540 return t;
16543 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16544 whose result is ignored. The type of the returned tree need not be
16545 the same as the original expression. */
16547 tree
16548 fold_ignored_result (tree t)
16550 if (!TREE_SIDE_EFFECTS (t))
16551 return integer_zero_node;
16553 for (;;)
16554 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16556 case tcc_unary:
16557 t = TREE_OPERAND (t, 0);
16558 break;
16560 case tcc_binary:
16561 case tcc_comparison:
16562 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16563 t = TREE_OPERAND (t, 0);
16564 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16565 t = TREE_OPERAND (t, 1);
16566 else
16567 return t;
16568 break;
16570 case tcc_expression:
16571 switch (TREE_CODE (t))
16573 case COMPOUND_EXPR:
16574 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16575 return t;
16576 t = TREE_OPERAND (t, 0);
16577 break;
16579 case COND_EXPR:
16580 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16581 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16582 return t;
16583 t = TREE_OPERAND (t, 0);
16584 break;
16586 default:
16587 return t;
16589 break;
16591 default:
16592 return t;
16596 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16597 This can only be applied to objects of a sizetype. */
16599 tree
16600 round_up_loc (location_t loc, tree value, int divisor)
16602 tree div = NULL_TREE;
16604 gcc_assert (divisor > 0);
16605 if (divisor == 1)
16606 return value;
16608 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16609 have to do anything. Only do this check when VALUE is not a
16610 constant, because for constants the check is more expensive than
16611 simply doing the rounding. */
16612 if (TREE_CODE (value) != INTEGER_CST)
16614 div = build_int_cst (TREE_TYPE (value), divisor);
16616 if (multiple_of_p (TREE_TYPE (value), value, div))
16617 return value;
16620 /* If divisor is a power of two, simplify this to bit manipulation. */
16621 if (divisor == (divisor & -divisor))
16623 if (TREE_CODE (value) == INTEGER_CST)
16625 double_int val = tree_to_double_int (value);
16626 bool overflow_p;
16628 if ((val.low & (divisor - 1)) == 0)
16629 return value;
16631 overflow_p = TREE_OVERFLOW (value);
16632 val.low &= ~(divisor - 1);
16633 val.low += divisor;
16634 if (val.low == 0)
16636 val.high++;
16637 if (val.high == 0)
16638 overflow_p = true;
16641 return force_fit_type_double (TREE_TYPE (value), val,
16642 -1, overflow_p);
16644 else
16646 tree t;
16648 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16649 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16650 t = build_int_cst (TREE_TYPE (value), -divisor);
16651 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16654 else
16656 if (!div)
16657 div = build_int_cst (TREE_TYPE (value), divisor);
16658 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16659 value = size_binop_loc (loc, MULT_EXPR, value, div);
16662 return value;
16665 /* Likewise, but round down. */
16667 tree
16668 round_down_loc (location_t loc, tree value, int divisor)
16670 tree div = NULL_TREE;
16672 gcc_assert (divisor > 0);
16673 if (divisor == 1)
16674 return value;
16676 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16677 have to do anything. Only do this check when VALUE is not a
16678 constant, because for constants the check is more expensive than
16679 simply doing the rounding. */
16680 if (TREE_CODE (value) != INTEGER_CST)
16682 div = build_int_cst (TREE_TYPE (value), divisor);
16684 if (multiple_of_p (TREE_TYPE (value), value, div))
16685 return value;
16688 /* If divisor is a power of two, simplify this to bit manipulation. */
16689 if (divisor == (divisor & -divisor))
16691 tree t;
16693 t = build_int_cst (TREE_TYPE (value), -divisor);
16694 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16696 else
16698 if (!div)
16699 div = build_int_cst (TREE_TYPE (value), divisor);
16700 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16701 value = size_binop_loc (loc, MULT_EXPR, value, div);
16704 return value;
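/* A standalone check (not part of this file; illustrative example code)
   of the power-of-two fast paths used by round_up_loc and round_down_loc:
   rounding up is (v + d - 1) & -d and rounding down is v & -d, for d a
   power of two.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned v = 37, d = 8;             /* d must be a power of two */
  assert (((v + d - 1) & -d) == 40);  /* round up to a multiple of 8 */
  assert ((v & -d) == 32);            /* round down to a multiple of 8 */
  assert (((40 + d - 1) & -d) == 40); /* exact multiples are unchanged */
  return 0;
}
#endif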
16707 /* Returns a pointer to the base of the object addressed by EXP and
16708 extracts the information about the offset of the access, storing it
16709 in *PBITPOS and *POFFSET. */
16711 static tree
16712 split_address_to_core_and_offset (tree exp,
16713 HOST_WIDE_INT *pbitpos, tree *poffset)
16715 tree core;
16716 enum machine_mode mode;
16717 int unsignedp, volatilep;
16718 HOST_WIDE_INT bitsize;
16719 location_t loc = EXPR_LOCATION (exp);
16721 if (TREE_CODE (exp) == ADDR_EXPR)
16723 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16724 poffset, &mode, &unsignedp, &volatilep,
16725 false);
16726 core = build_fold_addr_expr_loc (loc, core);
16728 else
16730 core = exp;
16731 *pbitpos = 0;
16732 *poffset = NULL_TREE;
16735 return core;
16738 /* Returns true if addresses of E1 and E2 differ by a constant, false
16739 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16741 bool
16742 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16744 tree core1, core2;
16745 HOST_WIDE_INT bitpos1, bitpos2;
16746 tree toffset1, toffset2, tdiff, type;
16748 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16749 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16751 if (bitpos1 % BITS_PER_UNIT != 0
16752 || bitpos2 % BITS_PER_UNIT != 0
16753 || !operand_equal_p (core1, core2, 0))
16754 return false;
16756 if (toffset1 && toffset2)
16758 type = TREE_TYPE (toffset1);
16759 if (type != TREE_TYPE (toffset2))
16760 toffset2 = fold_convert (type, toffset2);
16762 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16763 if (!cst_and_fits_in_hwi (tdiff))
16764 return false;
16766 *diff = int_cst_value (tdiff);
16768 else if (toffset1 || toffset2)
16770 /* If only one of the offsets is non-constant, the difference cannot
16771 be a constant. */
16772 return false;
16774 else
16775 *diff = 0;
16777 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16778 return true;
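/* A standalone illustration (not part of this file; illustrative example
   code) of what ptr_difference_const computes: two addresses into the
   same object differ by a compile-time constant number of bytes.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  double a[10];
  /* Same core (&a), offsets 5*sizeof(double) and 2*sizeof(double);
     the difference folds to the constant 3 elements.  */
  assert (&a[5] - &a[2] == 3);
  assert ((uintptr_t) &a[5] - (uintptr_t) &a[2] == 3 * sizeof (double));
  return 0;
}
#endif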
16781 /* Simplify the floating point expression EXP when the sign of the
16782 result is not significant. Return NULL_TREE if no simplification
16783 is possible. */
16785 tree
16786 fold_strip_sign_ops (tree exp)
16788 tree arg0, arg1;
16789 location_t loc = EXPR_LOCATION (exp);
16791 switch (TREE_CODE (exp))
16793 case ABS_EXPR:
16794 case NEGATE_EXPR:
16795 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16796 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16798 case MULT_EXPR:
16799 case RDIV_EXPR:
16800 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16801 return NULL_TREE;
16802 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16803 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16804 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16805 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16806 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16807 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16808 break;
16810 case COMPOUND_EXPR:
16811 arg0 = TREE_OPERAND (exp, 0);
16812 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16813 if (arg1)
16814 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16815 break;
16817 case COND_EXPR:
16818 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16819 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16820 if (arg0 || arg1)
16821 return fold_build3_loc (loc,
16822 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16823 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16824 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16825 break;
16827 case CALL_EXPR:
16829 const enum built_in_function fcode = builtin_mathfn_code (exp);
16830 switch (fcode)
16832 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16833 /* Strip the copysign function call and return its first argument. */
16834 arg0 = CALL_EXPR_ARG (exp, 0);
16835 arg1 = CALL_EXPR_ARG (exp, 1);
16836 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16838 default:
16839 /* Strip sign ops from the argument of "odd" math functions. */
16840 if (negate_mathfn_p (fcode))
16842 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16843 if (arg0)
16844 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16846 break;
16849 break;
16851 default:
16852 break;
16854 return NULL_TREE;
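/* A standalone illustration (not part of this file; illustrative example
   code) of when stripping sign operations is valid: under fabs the sign
   of the inner result is insignificant, so a NEGATE_EXPR on the operand
   and a copysign call (which only changes the sign) can be dropped.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = 1.25;
  /* NEGATE_EXPR stripped: fabs (-x) == fabs (x), exactly.  */
  assert (fabs (-x) == fabs (x));
  /* copysign (x, y) has the magnitude of x, so under fabs the call
     can be replaced by its first argument (the CASE_FLT_FN rule).  */
  assert (fabs (copysign (x, -1.0)) == fabs (x));
  return 0;
}
#endif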