gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
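/* Illustrative usage sketch (not from the original sources; the calling
   context is hypothetical, the entry points are real):

     tree a = size_int (4);
     tree b = size_int (8);
     tree s = size_binop (PLUS_EXPR, a, b);

   S is then the INTEGER_CST 12 of type sizetype: both operands were
   constants, so no PLUS_EXPR node is built.  size_binop asserts that
   both operands have equivalent integer types, in the sense of
   int_binop_types_match_p below.  */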
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "tree-flow.h"
63 /* Nonzero if we are folding constants inside an initializer; zero
64 otherwise. */
65 int folding_initializer = 0;
67 /* The following constants represent a bit based encoding of GCC's
68 comparison operators. This encoding simplifies transformations
69 on relational comparison operators, such as AND and OR. */
70 enum comparison_code {
71 COMPCODE_FALSE = 0,
72 COMPCODE_LT = 1,
73 COMPCODE_EQ = 2,
74 COMPCODE_LE = 3,
75 COMPCODE_GT = 4,
76 COMPCODE_LTGT = 5,
77 COMPCODE_GE = 6,
78 COMPCODE_ORD = 7,
79 COMPCODE_UNORD = 8,
80 COMPCODE_UNLT = 9,
81 COMPCODE_UNEQ = 10,
82 COMPCODE_UNLE = 11,
83 COMPCODE_UNGT = 12,
84 COMPCODE_NE = 13,
85 COMPCODE_UNGE = 14,
86 COMPCODE_TRUE = 15
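/* Illustration of the encoding (not part of the original sources): one
   bit per possible outcome, with bit 0 = "less", bit 1 = "equal",
   bit 2 = "greater" and bit 3 = "unordered".  Combining comparisons is
   then plain bit arithmetic, e.g.

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                   1|2 == 3
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT)  8|1|4 == 13

   so (a < b) || (a == b) can be folded to (a <= b) by OR-ing the two
   codes together.  */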
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
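/* Sketch of the contract above (illustrative; the real arguments are
   INTEGER_CST trees, shown here as plain integers): for arg1 == 12 and
   arg2 == 4 this returns the constant 3; for arg1 == 13 and arg2 == 4
   the remainder is nonzero, so NULL_TREE is returned and the caller
   must keep the original expression.  */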
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
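/* Typical defer/undefer pattern (an illustrative sketch; the
   may_use_result predicate is hypothetical, everything else is
   declared above):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     bool issue = folded != NULL_TREE && may_use_result (folded);
     fold_undefer_overflow_warnings (issue, stmt, 0);

   A warning recorded while folding is emitted only if ISSUE ends up
   true, attached to STMT's location (or to input_location when STMT
   is NULL).  */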
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
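/* Example (illustrative): for a 32-bit signed type the only value this
   rejects is 0x80000000 (INT_MIN), since -INT_MIN is not representable
   in the same type; every other constant, including INT_MAX, negates
   without overflow.  */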
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
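/* A few of the rewrites above, spelled out (illustrative only):

     -(~a)       becomes  a + 1              BIT_NOT_EXPR, integral types
     -(a - b)    becomes  b - a              MINUS_EXPR, no signed zeros
     -(x >> 31)  becomes  (unsigned) x >> 31 RSHIFT_EXPR, 32-bit int

   Each rewrite is attempted only when negate_expr_p has confirmed it
   is safe under the type's overflow and rounding semantics.  */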
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
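/* Worked examples for split_tree (illustrative):

     IN = a + 4, CODE = PLUS_EXPR:
       returns a, *litp = 4, *conp = 0, *minus_litp = 0.

     IN = a - 4, CODE = PLUS_EXPR:
       returns a, *minus_litp = 4 (the subtracted literal), *litp = 0.

     IN = a + c, CODE = PLUS_EXPR, where C has TREE_CONSTANT set but is
     not a literal (say, the address of a static object):
       returns a, *conp = c, *litp = 0.  */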
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type),
                        2 * TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
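/* Worked example for the MULT_HIGHPART_EXPR case above (illustrative):
   with a 16-bit unsigned type, op1 == 0x1234 and op2 == 0x5678 multiply
   to the 32-bit value 0x06260060; shifting right by TYPE_PRECISION (16)
   within the doubled-precision window leaves 0x0626, the high part of
   the product, which becomes the folded result.  */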
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
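/* Worked example for the complex MULT_EXPR branch above (illustrative,
   for a complex integer type, where the mpc path is not taken):

     (1 + 2i) * (3 + 4i)
       real = 1*3 - 2*4 = -5
       imag = 1*4 + 2*3 = 10

   Each part is computed by recursive const_binop calls on the
   INTEGER_CST components.  */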
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
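/* Example (illustrative): size_diffop_loc on the sizetype constants 4
   and 12 returns the ssizetype constant -8.  Since 12 > 4, the code
   computes 12 - 4 in the unsigned type (which cannot overflow),
   converts the result to ssizetype, and negates it.  */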
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
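/* Example of the saturating semantics above (illustrative): converting
   the REAL_CST 1e30 to a 32-bit signed type yields INT_MAX (2147483647)
   with TREE_OVERFLOW set on the result; converting a NaN yields 0, also
   with TREE_OVERFLOW set.  */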
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP whenever the discarded fractional
     bits are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
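/* Illustration only (hypothetical example, not part of fold-const.c):
   the kind of source-level conversion folded above.  Casting a C99
   _Complex value to a scalar type keeps only its real part, which is
   why the COMPLEX_TYPE cases reduce to a REALPART_EXPR before
   converting:  */

static double
complex_to_real_example (double _Complex z)
{
  /* Folded as if written __real__ z, with no complex temporary.  */
  return (double) z;
}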
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
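/* Illustration only (hypothetical example, not part of fold-const.c):
   why honoring NaNs forces the UN* codes.  For IEEE doubles, !(x < y)
   is "x and y unordered, or x >= y" -- not plain x >= y.  The check
   below returns 1 for every pair of doubles, NaNs included:  */

static int
inverted_lt_is_unge (double x, double y)
{
  return (!(x < y)) == (__builtin_isunordered (x, y) || x >= y);
}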
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
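/* Illustration only (hypothetical example, not part of fold-const.c):
   the bit encoding doing the work.  ORing the compcode of < with the
   compcode of == yields the compcode of <=, so for integer operands
   the two functions below are equivalent and the first folds into the
   second:  */

static int combine_before (int x, int y) { return x < y || x == y; }
static int combine_after  (int x, int y) { return x <= y; }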
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address space equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, then both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal,
2546 but their addresses can be. */
2547 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2548 && (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1)))
2550 return 0;
2552 switch (TREE_CODE (arg0))
2554 case INDIRECT_REF:
2555 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2556 return OP_SAME (0);
2558 case REALPART_EXPR:
2559 case IMAGPART_EXPR:
2560 return OP_SAME (0);
2562 case TARGET_MEM_REF:
2563 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2564 /* Require equal extra operands and then fall through to MEM_REF
2565 handling of the two common operands. */
2566 if (!OP_SAME_WITH_NULL (2)
2567 || !OP_SAME_WITH_NULL (3)
2568 || !OP_SAME_WITH_NULL (4))
2569 return 0;
2570 /* Fallthru. */
2571 case MEM_REF:
2572 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2573 /* Require equal access sizes, and similar pointer types.
2574 We can have incomplete types for array references of
2575 variable-sized arrays from the Fortran frontend
2576 though. Also verify the types are compatible. */
2577 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2578 || (TYPE_SIZE (TREE_TYPE (arg0))
2579 && TYPE_SIZE (TREE_TYPE (arg1))
2580 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2581 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2582 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2583 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2584 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2585 && OP_SAME (0) && OP_SAME (1));
2587 case ARRAY_REF:
2588 case ARRAY_RANGE_REF:
2589 /* Operands 2 and 3 may be null.
2590 Compare the array index by value if it is constant first as we
2591 may have different types but same value here. */
2592 if (!OP_SAME (0))
2593 return 0;
2594 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2595 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2596 TREE_OPERAND (arg1, 1))
2597 || OP_SAME (1))
2598 && OP_SAME_WITH_NULL (2)
2599 && OP_SAME_WITH_NULL (3));
2601 case COMPONENT_REF:
2602 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2603 may be NULL when we're called to compare MEM_EXPRs. */
2604 if (!OP_SAME_WITH_NULL (0))
2605 return 0;
2606 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2607 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2609 case BIT_FIELD_REF:
2610 if (!OP_SAME (0))
2611 return 0;
2612 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2613 return OP_SAME (1) && OP_SAME (2);
2615 default:
2616 return 0;
2619 case tcc_expression:
2620 switch (TREE_CODE (arg0))
2622 case ADDR_EXPR:
2623 case TRUTH_NOT_EXPR:
2624 return OP_SAME (0);
2626 case TRUTH_ANDIF_EXPR:
2627 case TRUTH_ORIF_EXPR:
2628 return OP_SAME (0) && OP_SAME (1);
2630 case FMA_EXPR:
2631 case WIDEN_MULT_PLUS_EXPR:
2632 case WIDEN_MULT_MINUS_EXPR:
2633 if (!OP_SAME (2))
2634 return 0;
2635 /* The multiplication operands are commutative. */
2636 /* FALLTHRU */
2638 case TRUTH_AND_EXPR:
2639 case TRUTH_OR_EXPR:
2640 case TRUTH_XOR_EXPR:
2641 if (OP_SAME (0) && OP_SAME (1))
2642 return 1;
2644 /* Otherwise take into account this is a commutative operation. */
2645 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2646 TREE_OPERAND (arg1, 1), flags)
2647 && operand_equal_p (TREE_OPERAND (arg0, 1),
2648 TREE_OPERAND (arg1, 0), flags));
2650 case COND_EXPR:
2651 case VEC_COND_EXPR:
2652 case DOT_PROD_EXPR:
2653 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2655 default:
2656 return 0;
2659 case tcc_vl_exp:
2660 switch (TREE_CODE (arg0))
2662 case CALL_EXPR:
2663 /* If the CALL_EXPRs call different functions, then they
2664 clearly cannot be equal. */
2665 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2666 flags))
2667 return 0;
2670 unsigned int cef = call_expr_flags (arg0);
2671 if (flags & OEP_PURE_SAME)
2672 cef &= ECF_CONST | ECF_PURE;
2673 else
2674 cef &= ECF_CONST;
2675 if (!cef)
2676 return 0;
2679 /* Now see if all the arguments are the same. */
2681 const_call_expr_arg_iterator iter0, iter1;
2682 const_tree a0, a1;
2683 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2684 a1 = first_const_call_expr_arg (arg1, &iter1);
2685 a0 && a1;
2686 a0 = next_const_call_expr_arg (&iter0),
2687 a1 = next_const_call_expr_arg (&iter1))
2688 if (! operand_equal_p (a0, a1, flags))
2689 return 0;
2691 /* If we get here and both argument lists are exhausted
2692 then the CALL_EXPRs are equal. */
2693 return ! (a0 || a1);
2695 default:
2696 return 0;
2699 case tcc_declaration:
2700 /* Consider __builtin_sqrt equal to sqrt. */
2701 return (TREE_CODE (arg0) == FUNCTION_DECL
2702 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2703 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2704 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2706 default:
2707 return 0;
2710 #undef OP_SAME
2711 #undef OP_SAME_WITH_NULL
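/* Illustration only (hypothetical example, not part of fold-const.c):
   the IEEE point behind the OEP_ONLY_CONST comment.  -0.0 and 0.0
   compare equal with ==, yet are distinguishable, e.g. through
   division:  */

static int
signed_zero_example (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz && 1.0 / pz != 1.0 / nz;	/* evaluates to 1 */
}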
2714 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2715 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2717 When in doubt, return 0. */
2719 static int
2720 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2722 int unsignedp1, unsignedpo;
2723 tree primarg0, primarg1, primother;
2724 unsigned int correct_width;
2726 if (operand_equal_p (arg0, arg1, 0))
2727 return 1;
2729 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2730 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2731 return 0;
2733 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2734 and see if the inner values are the same. This removes any
2735 signedness comparison, which doesn't matter here. */
2736 primarg0 = arg0, primarg1 = arg1;
2737 STRIP_NOPS (primarg0);
2738 STRIP_NOPS (primarg1);
2739 if (operand_equal_p (primarg0, primarg1, 0))
2740 return 1;
2742 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2743 actual comparison operand, ARG0.
2745 First throw away any conversions to wider types
2746 already present in the operands. */
2748 primarg1 = get_narrower (arg1, &unsignedp1);
2749 primother = get_narrower (other, &unsignedpo);
2751 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2752 if (unsignedp1 == unsignedpo
2753 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2754 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2756 tree type = TREE_TYPE (arg0);
2758 /* Make sure shorter operand is extended the right way
2759 to match the longer operand. */
2760 primarg1 = fold_convert (signed_or_unsigned_type_for
2761 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2763 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2764 return 1;
2767 return 0;
2770 /* See if ARG is an expression that is either a comparison or is performing
2771 arithmetic on comparisons. The comparisons must only be comparing
2772 two different values, which will be stored in *CVAL1 and *CVAL2; if
2773 they are nonzero it means that some operands have already been found.
2774 No variables may be used anywhere else in the expression except in the
2775 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2776 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2778 If this is true, return 1. Otherwise, return zero. */
2780 static int
2781 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2783 enum tree_code code = TREE_CODE (arg);
2784 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2786 /* We can handle some of the tcc_expression cases here. */
2787 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2788 tclass = tcc_unary;
2789 else if (tclass == tcc_expression
2790 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2791 || code == COMPOUND_EXPR))
2792 tclass = tcc_binary;
2794 else if (tclass == tcc_expression && code == SAVE_EXPR
2795 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2797 /* If we've already found a CVAL1 or CVAL2, this expression is
2798 too complex to handle. */
2799 if (*cval1 || *cval2)
2800 return 0;
2802 tclass = tcc_unary;
2803 *save_p = 1;
2806 switch (tclass)
2808 case tcc_unary:
2809 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2811 case tcc_binary:
2812 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2813 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2814 cval1, cval2, save_p));
2816 case tcc_constant:
2817 return 1;
2819 case tcc_expression:
2820 if (code == COND_EXPR)
2821 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2822 cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 cval1, cval2, save_p)
2825 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2826 cval1, cval2, save_p));
2827 return 0;
2829 case tcc_comparison:
2830 /* First see if we can handle the first operand, then the second. For
2831 the second operand, we know *CVAL1 can't be zero. It must be that
2832 one side of the comparison is each of the values; test for the
2833 case where this isn't true by failing if the two operands
2834 are the same. */
2836 if (operand_equal_p (TREE_OPERAND (arg, 0),
2837 TREE_OPERAND (arg, 1), 0))
2838 return 0;
2840 if (*cval1 == 0)
2841 *cval1 = TREE_OPERAND (arg, 0);
2842 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2843 ;
2844 else if (*cval2 == 0)
2845 *cval2 = TREE_OPERAND (arg, 0);
2846 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2847 ;
2848 else
2849 return 0;
2851 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2852 ;
2853 else if (*cval2 == 0)
2854 *cval2 = TREE_OPERAND (arg, 1);
2855 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2856 ;
2857 else
2858 return 0;
2860 return 1;
2862 default:
2863 return 0;
2867 /* ARG is a tree that is known to contain just arithmetic operations and
2868 comparisons. Evaluate the operations in the tree substituting NEW0 for
2869 any occurrence of OLD0 as an operand of a comparison and likewise for
2870 NEW1 and OLD1. */
2872 static tree
2873 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2874 tree old1, tree new1)
2876 tree type = TREE_TYPE (arg);
2877 enum tree_code code = TREE_CODE (arg);
2878 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2880 /* We can handle some of the tcc_expression cases here. */
2881 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2882 tclass = tcc_unary;
2883 else if (tclass == tcc_expression
2884 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2885 tclass = tcc_binary;
2887 switch (tclass)
2889 case tcc_unary:
2890 return fold_build1_loc (loc, code, type,
2891 eval_subst (loc, TREE_OPERAND (arg, 0),
2892 old0, new0, old1, new1));
2894 case tcc_binary:
2895 return fold_build2_loc (loc, code, type,
2896 eval_subst (loc, TREE_OPERAND (arg, 0),
2897 old0, new0, old1, new1),
2898 eval_subst (loc, TREE_OPERAND (arg, 1),
2899 old0, new0, old1, new1));
2901 case tcc_expression:
2902 switch (code)
2904 case SAVE_EXPR:
2905 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2906 old1, new1);
2908 case COMPOUND_EXPR:
2909 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2910 old1, new1);
2912 case COND_EXPR:
2913 return fold_build3_loc (loc, code, type,
2914 eval_subst (loc, TREE_OPERAND (arg, 0),
2915 old0, new0, old1, new1),
2916 eval_subst (loc, TREE_OPERAND (arg, 1),
2917 old0, new0, old1, new1),
2918 eval_subst (loc, TREE_OPERAND (arg, 2),
2919 old0, new0, old1, new1));
2920 default:
2921 break;
2923 /* Fall through - ??? */
2925 case tcc_comparison:
2927 tree arg0 = TREE_OPERAND (arg, 0);
2928 tree arg1 = TREE_OPERAND (arg, 1);
2930 /* We need to check both for exact equality and tree equality. The
2931 former will be true if the operand has a side-effect. In that
2932 case, we know the operand occurred exactly once. */
2934 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2935 arg0 = new0;
2936 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2937 arg0 = new1;
2939 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2940 arg1 = new0;
2941 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2942 arg1 = new1;
2944 return fold_build2_loc (loc, code, type, arg0, arg1);
2947 default:
2948 return arg;
2952 /* Return a tree for the case when the result of an expression is RESULT
2953 converted to TYPE and OMITTED was previously an operand of the expression
2954 but is now not needed (e.g., we folded OMITTED * 0).
2956 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2957 the conversion of RESULT to TYPE. */
2959 tree
2960 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2962 tree t = fold_convert_loc (loc, type, result);
2964 /* If the resulting operand is an empty statement, just return the omitted
2965 statement cast to void. */
2966 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2967 return build1_loc (loc, NOP_EXPR, void_type_node,
2968 fold_ignored_result (omitted));
2970 if (TREE_SIDE_EFFECTS (omitted))
2971 return build2_loc (loc, COMPOUND_EXPR, type,
2972 fold_ignored_result (omitted), t);
2974 return non_lvalue_loc (loc, t);
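/* Illustration only (hypothetical example, not part of fold-const.c):
   why OMITTED may still need evaluating.  Folding f () * 0 must not
   drop the call, so the result is built as if the user had written
   (f (), 0):  */

static int call_count;
static int f_with_side_effect (void) { return ++call_count; }
static int omit_example (void) { return f_with_side_effect () * 0; }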
2977 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2979 static tree
2980 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2981 tree omitted)
2983 tree t = fold_convert_loc (loc, type, result);
2985 /* If the resulting operand is an empty statement, just return the omitted
2986 statement cast to void. */
2987 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2988 return build1_loc (loc, NOP_EXPR, void_type_node,
2989 fold_ignored_result (omitted));
2991 if (TREE_SIDE_EFFECTS (omitted))
2992 return build2_loc (loc, COMPOUND_EXPR, type,
2993 fold_ignored_result (omitted), t);
2995 return pedantic_non_lvalue_loc (loc, t);
2998 /* Return a tree for the case when the result of an expression is RESULT
2999 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3000 of the expression but are now not needed.
3002 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3003 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3004 evaluated before OMITTED2. Otherwise, if neither has side effects,
3005 just do the conversion of RESULT to TYPE. */
3007 tree
3008 omit_two_operands_loc (location_t loc, tree type, tree result,
3009 tree omitted1, tree omitted2)
3011 tree t = fold_convert_loc (loc, type, result);
3013 if (TREE_SIDE_EFFECTS (omitted2))
3014 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3015 if (TREE_SIDE_EFFECTS (omitted1))
3016 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3018 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3022 /* Return a simplified tree node for the truth-negation of ARG. This
3023 never alters ARG itself. We assume that ARG is an operation that
3024 returns a truth value (0 or 1).
3026 FIXME: one would think we would fold the result, but it causes
3027 problems with the dominator optimizer. */
3029 tree
3030 fold_truth_not_expr (location_t loc, tree arg)
3032 tree type = TREE_TYPE (arg);
3033 enum tree_code code = TREE_CODE (arg);
3034 location_t loc1, loc2;
3036 /* If this is a comparison, we can simply invert it, except for
3037 floating-point non-equality comparisons, in which case we just
3038 enclose a TRUTH_NOT_EXPR around what we have. */
3040 if (TREE_CODE_CLASS (code) == tcc_comparison)
3042 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3043 if (FLOAT_TYPE_P (op_type)
3044 && flag_trapping_math
3045 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3046 && code != NE_EXPR && code != EQ_EXPR)
3047 return NULL_TREE;
3049 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3050 if (code == ERROR_MARK)
3051 return NULL_TREE;
3053 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3054 TREE_OPERAND (arg, 1));
3057 switch (code)
3059 case INTEGER_CST:
3060 return constant_boolean_node (integer_zerop (arg), type);
3062 case TRUTH_AND_EXPR:
3063 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3064 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3065 return build2_loc (loc, TRUTH_OR_EXPR, type,
3066 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3067 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3069 case TRUTH_OR_EXPR:
3070 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3071 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3072 return build2_loc (loc, TRUTH_AND_EXPR, type,
3073 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3074 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3076 case TRUTH_XOR_EXPR:
3077 /* Here we can invert either operand. We invert the first operand
3078 unless the second operand is a TRUTH_NOT_EXPR in which case our
3079 result is the XOR of the first operand with the inside of the
3080 negation of the second operand. */
3082 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3083 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3084 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3085 else
3086 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3087 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3088 TREE_OPERAND (arg, 1));
3090 case TRUTH_ANDIF_EXPR:
3091 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3092 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3093 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3094 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3095 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3097 case TRUTH_ORIF_EXPR:
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3100 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3101 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3102 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3104 case TRUTH_NOT_EXPR:
3105 return TREE_OPERAND (arg, 0);
3107 case COND_EXPR:
3109 tree arg1 = TREE_OPERAND (arg, 1);
3110 tree arg2 = TREE_OPERAND (arg, 2);
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3115 /* A COND_EXPR may have a throw as one operand, which
3116 then has void type. Just leave void operands
3117 as they are. */
3118 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3119 VOID_TYPE_P (TREE_TYPE (arg1))
3120 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3121 VOID_TYPE_P (TREE_TYPE (arg2))
3122 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3125 case COMPOUND_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3127 return build2_loc (loc, COMPOUND_EXPR, type,
3128 TREE_OPERAND (arg, 0),
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3131 case NON_LVALUE_EXPR:
3132 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3133 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3135 CASE_CONVERT:
3136 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3137 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3139 /* ... fall through ... */
3141 case FLOAT_EXPR:
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143 return build1_loc (loc, TREE_CODE (arg), type,
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3146 case BIT_AND_EXPR:
3147 if (!integer_onep (TREE_OPERAND (arg, 1)))
3148 return NULL_TREE;
3149 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3151 case SAVE_EXPR:
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 case CLEANUP_POINT_EXPR:
3155 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3156 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3159 default:
3160 return NULL_TREE;
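/* Illustration only (hypothetical example, not part of fold-const.c):
   the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws; for truth values each pair below agrees everywhere:  */

static int not_and (int a, int b) { return !(a && b); }	/* becomes ... */
static int or_nots (int a, int b) { return !a || !b; }

static int not_or (int a, int b) { return !(a || b); }	/* becomes ... */
static int and_nots (int a, int b) { return !a && !b; }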
3164 /* Return a simplified tree node for the truth-negation of ARG. This
3165 never alters ARG itself. We assume that ARG is an operation that
3166 returns a truth value (0 or 1).
3168 FIXME: one would think we would fold the result, but it causes
3169 problems with the dominator optimizer. */
3171 tree
3172 invert_truthvalue_loc (location_t loc, tree arg)
3174 tree tem;
3176 if (TREE_CODE (arg) == ERROR_MARK)
3177 return arg;
3179 tem = fold_truth_not_expr (loc, arg);
3180 if (!tem)
3181 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3183 return tem;
3186 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3187 operands are another bit-wise operation with a common input. If so,
3188 distribute the bit operations to save an operation and possibly two if
3189 constants are involved. For example, convert
3190 (A | B) & (A | C) into A | (B & C)
3191 Further simplification will occur if B and C are constants.
3193 If this optimization cannot be done, 0 will be returned. */
3195 static tree
3196 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3197 tree arg0, tree arg1)
3199 tree common;
3200 tree left, right;
3202 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3203 || TREE_CODE (arg0) == code
3204 || (TREE_CODE (arg0) != BIT_AND_EXPR
3205 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3206 return 0;
3208 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3210 common = TREE_OPERAND (arg0, 0);
3211 left = TREE_OPERAND (arg0, 1);
3212 right = TREE_OPERAND (arg1, 1);
3214 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3216 common = TREE_OPERAND (arg0, 0);
3217 left = TREE_OPERAND (arg0, 1);
3218 right = TREE_OPERAND (arg1, 0);
3220 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3222 common = TREE_OPERAND (arg0, 1);
3223 left = TREE_OPERAND (arg0, 0);
3224 right = TREE_OPERAND (arg1, 1);
3226 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3228 common = TREE_OPERAND (arg0, 1);
3229 left = TREE_OPERAND (arg0, 0);
3230 right = TREE_OPERAND (arg1, 0);
3232 else
3233 return 0;
3235 common = fold_convert_loc (loc, type, common);
3236 left = fold_convert_loc (loc, type, left);
3237 right = fold_convert_loc (loc, type, right);
3238 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3239 fold_build2_loc (loc, code, type, left, right));
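/* Illustration only (hypothetical example, not part of fold-const.c):
   the distributive law named above, applied bitwise.  The two
   functions agree for all inputs; the rewrite saves one operation and
   lets (b & c) fold further when both are constants:  */

static unsigned dist_before (unsigned a, unsigned b, unsigned c)
{ return (a | b) & (a | c); }
static unsigned dist_after (unsigned a, unsigned b, unsigned c)
{ return a | (b & c); }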
3242 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3243 with code CODE. This optimization is unsafe. */
3244 static tree
3245 distribute_real_division (location_t loc, enum tree_code code, tree type,
3246 tree arg0, tree arg1)
3248 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3249 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3251 /* (A / C) +- (B / C) -> (A +- B) / C. */
3252 if (mul0 == mul1
3253 && operand_equal_p (TREE_OPERAND (arg0, 1),
3254 TREE_OPERAND (arg1, 1), 0))
3255 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3256 fold_build2_loc (loc, code, type,
3257 TREE_OPERAND (arg0, 0),
3258 TREE_OPERAND (arg1, 0)),
3259 TREE_OPERAND (arg0, 1));
3261 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3262 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3263 TREE_OPERAND (arg1, 0), 0)
3264 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3265 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3267 REAL_VALUE_TYPE r0, r1;
3268 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3269 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3270 if (!mul0)
3271 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3272 if (!mul1)
3273 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3274 real_arithmetic (&r0, code, &r0, &r1);
3275 return fold_build2_loc (loc, MULT_EXPR, type,
3276 TREE_OPERAND (arg0, 0),
3277 build_real (type, r0));
3280 return NULL_TREE;
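/* Illustration only (hypothetical example, not part of fold-const.c):
   why the comment above calls this unsafe.  With IEEE rounding and
   overflow the two forms can differ: for a = b = DBL_MAX and c = 4.0,
   the first returns DBL_MAX / 2 while the second overflows to +inf
   because a + b is computed first:  */

static double div_before (double a, double b, double c) { return a / c + b / c; }
static double div_after (double a, double b, double c) { return (a + b) / c; }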
3283 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3284 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3286 static tree
3287 make_bit_field_ref (location_t loc, tree inner, tree type,
3288 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3290 tree result, bftype;
3292 if (bitpos == 0)
3294 tree size = TYPE_SIZE (TREE_TYPE (inner));
3295 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3296 || POINTER_TYPE_P (TREE_TYPE (inner)))
3297 && host_integerp (size, 0)
3298 && tree_low_cst (size, 0) == bitsize)
3299 return fold_convert_loc (loc, type, inner);
3302 bftype = type;
3303 if (TYPE_PRECISION (bftype) != bitsize
3304 || TYPE_UNSIGNED (bftype) == !unsignedp)
3305 bftype = build_nonstandard_integer_type (bitsize, 0);
3307 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3308 size_int (bitsize), bitsize_int (bitpos));
3310 if (bftype != type)
3311 result = fold_convert_loc (loc, type, result);
3313 return result;
3316 /* Optimize a bit-field compare.
3318 There are two cases: First is a compare against a constant and the
3319 second is a comparison of two items where the fields are at the same
3320 bit position relative to the start of a chunk (byte, halfword, word)
3321 large enough to contain it. In these cases we can avoid the shift
3322 implicit in bitfield extractions.
3324 For constants, we emit a compare of the shifted constant with the
3325 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3326 compared. For two fields at the same position, we do the ANDs with the
3327 similar mask and compare the result of the ANDs.
3329 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3330 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3331 are the left and right operands of the comparison, respectively.
3333 If the optimization described above can be done, we return the resulting
3334 tree. Otherwise we return zero. */
3336 static tree
3337 optimize_bit_field_compare (location_t loc, enum tree_code code,
3338 tree compare_type, tree lhs, tree rhs)
3340 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3341 tree type = TREE_TYPE (lhs);
3342 tree signed_type, unsigned_type;
3343 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3344 enum machine_mode lmode, rmode, nmode;
3345 int lunsignedp, runsignedp;
3346 int lvolatilep = 0, rvolatilep = 0;
3347 tree linner, rinner = NULL_TREE;
3348 tree mask;
3349 tree offset;
3351 /* In the strict volatile bitfields case, doing code changes here may prevent
3352 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3353 if (flag_strict_volatile_bitfields > 0)
3354 return 0;
3356 /* Get all the information about the extractions being done. If the bit size
3357 is the same as the size of the underlying object, we aren't doing an
3358 extraction at all and so can do nothing. We also don't want to
3359 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3360 then will no longer be able to replace it. */
3361 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3362 &lunsignedp, &lvolatilep, false);
3363 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3364 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3365 return 0;
3367 if (!const_p)
3369 /* If this is not a constant, we can only do something if bit positions,
3370 sizes, and signedness are the same. */
3371 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3372 &runsignedp, &rvolatilep, false);
3374 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3375 || lunsignedp != runsignedp || offset != 0
3376 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3377 return 0;
3380 /* See if we can find a mode to refer to this field. We should be able to,
3381 but fail if we can't. */
3382 if (lvolatilep
3383 && GET_MODE_BITSIZE (lmode) > 0
3384 && flag_strict_volatile_bitfields > 0)
3385 nmode = lmode;
3386 else
3387 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3388 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3389 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3390 TYPE_ALIGN (TREE_TYPE (rinner))),
3391 word_mode, lvolatilep || rvolatilep);
3392 if (nmode == VOIDmode)
3393 return 0;
3395 /* Set signed and unsigned types of the precision of this mode for the
3396 shifts below. */
3397 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3398 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3400 /* Compute the bit position and size for the new reference and our offset
3401 within it. If the new reference is the same size as the original, we
3402 won't optimize anything, so return zero. */
3403 nbitsize = GET_MODE_BITSIZE (nmode);
3404 nbitpos = lbitpos & ~ (nbitsize - 1);
3405 lbitpos -= nbitpos;
3406 if (nbitsize == lbitsize)
3407 return 0;
3409 if (BYTES_BIG_ENDIAN)
3410 lbitpos = nbitsize - lbitsize - lbitpos;
3412 /* Make the mask to be used against the extracted field. */
3413 mask = build_int_cst_type (unsigned_type, -1);
3414 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3415 mask = const_binop (RSHIFT_EXPR, mask,
3416 size_int (nbitsize - lbitsize - lbitpos));
3418 if (! const_p)
3419 /* If not comparing with constant, just rework the comparison
3420 and return. */
3421 return fold_build2_loc (loc, code, compare_type,
3422 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3423 make_bit_field_ref (loc, linner,
3424 unsigned_type,
3425 nbitsize, nbitpos,
3426 1),
3427 mask),
3428 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3429 make_bit_field_ref (loc, rinner,
3430 unsigned_type,
3431 nbitsize, nbitpos,
3432 1),
3433 mask));
3435 /* Otherwise, we are handling the constant case. See if the constant is too
3436 big for the field. Warn and return a tree for 0 (false) if so. We do
3437 this not only for its own sake, but to avoid having to test for this
3438 error case below. If we didn't, we might generate wrong code.
3440 For unsigned fields, the constant shifted right by the field length should
3441 be all zero. For signed fields, the high-order bits should agree with
3442 the sign bit. */
3444 if (lunsignedp)
3446 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3447 fold_convert_loc (loc,
3448 unsigned_type, rhs),
3449 size_int (lbitsize))))
3451 warning (0, "comparison is always %d due to width of bit-field",
3452 code == NE_EXPR);
3453 return constant_boolean_node (code == NE_EXPR, compare_type);
3456 else
3458 tree tem = const_binop (RSHIFT_EXPR,
3459 fold_convert_loc (loc, signed_type, rhs),
3460 size_int (lbitsize - 1));
3461 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3463 warning (0, "comparison is always %d due to width of bit-field",
3464 code == NE_EXPR);
3465 return constant_boolean_node (code == NE_EXPR, compare_type);
3469 /* Single-bit compares should always be against zero. */
3470 if (lbitsize == 1 && ! integer_zerop (rhs))
3472 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3473 rhs = build_int_cst (type, 0);
3476 /* Make a new bitfield reference, shift the constant over the
3477 appropriate number of bits and mask it with the computed mask
3478 (in case this was a signed field). If we changed it, make a new one. */
3479 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3480 if (lvolatilep)
3482 TREE_SIDE_EFFECTS (lhs) = 1;
3483 TREE_THIS_VOLATILE (lhs) = 1;
3486 rhs = const_binop (BIT_AND_EXPR,
3487 const_binop (LSHIFT_EXPR,
3488 fold_convert_loc (loc, unsigned_type, rhs),
3489 size_int (lbitpos)),
3490 mask);
3492 lhs = build2_loc (loc, code, compare_type,
3493 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3494 return lhs;
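/* Illustration only (hypothetical example, not part of fold-const.c):
   the shift this saves.  Suppose a 3-bit field sits at bits 2..4 of a
   byte already loaded into WORD; extracting then comparing (first
   function) needs a shift, while shifting the constant instead
   (second function) does not, and the two are equivalent:  */

static int bf_extract (unsigned char word) { return ((word >> 2) & 7) == 5; }
static int bf_masked (unsigned char word) { return (word & (7 << 2)) == (5 << 2); }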
3497 /* Subroutine for fold_truth_andor_1: decode a field reference.
3499 If EXP is a comparison reference, we return the innermost reference.
3501 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3502 set to the starting bit number.
3504 If the innermost field can be completely contained in a mode-sized
3505 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3507 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3508 otherwise it is not changed.
3510 *PUNSIGNEDP is set to the signedness of the field.
3512 *PMASK is set to the mask used. This is either contained in a
3513 BIT_AND_EXPR or derived from the width of the field.
3515 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3517 Return 0 if this is not a component reference or is one that we can't
3518 do anything with. */
3520 static tree
3521 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3522 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3523 int *punsignedp, int *pvolatilep,
3524 tree *pmask, tree *pand_mask)
3526 tree outer_type = 0;
3527 tree and_mask = 0;
3528 tree mask, inner, offset;
3529 tree unsigned_type;
3530 unsigned int precision;
3532 /* All the optimizations using this function assume integer fields.
3533 There are problems with FP fields since the type_for_size call
3534 below can fail for, e.g., XFmode. */
3535 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3536 return 0;
3538 /* We are interested in the bare arrangement of bits, so strip everything
3539 that doesn't affect the machine mode. However, record the type of the
3540 outermost expression if it may matter below. */
3541 if (CONVERT_EXPR_P (exp)
3542 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3543 outer_type = TREE_TYPE (exp);
3544 STRIP_NOPS (exp);
3546 if (TREE_CODE (exp) == BIT_AND_EXPR)
3548 and_mask = TREE_OPERAND (exp, 1);
3549 exp = TREE_OPERAND (exp, 0);
3550 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3551 if (TREE_CODE (and_mask) != INTEGER_CST)
3552 return 0;
3555 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3556 punsignedp, pvolatilep, false);
3557 if ((inner == exp && and_mask == 0)
3558 || *pbitsize < 0 || offset != 0
3559 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3560 return 0;
3562 /* If the number of bits in the reference is the same as the bitsize of
3563 the outer type, then the outer type gives the signedness. Otherwise
3564 (in case of a small bitfield) the signedness is unchanged. */
3565 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3566 *punsignedp = TYPE_UNSIGNED (outer_type);
3568 /* Compute the mask to access the bitfield. */
3569 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3570 precision = TYPE_PRECISION (unsigned_type);
3572 mask = build_int_cst_type (unsigned_type, -1);
3574 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3575 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3577 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3578 if (and_mask != 0)
3579 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3580 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3582 *pmask = mask;
3583 *pand_mask = and_mask;
3584 return inner;
3587 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3588 bit positions. */
3590 static int
3591 all_ones_mask_p (const_tree mask, int size)
3593 tree type = TREE_TYPE (mask);
3594 unsigned int precision = TYPE_PRECISION (type);
3595 tree tmask;
3597 tmask = build_int_cst_type (signed_type_for (type), -1);
3599 return
3600 tree_int_cst_equal (mask,
3601 const_binop (RSHIFT_EXPR,
3602 const_binop (LSHIFT_EXPR, tmask,
3603 size_int (precision - size)),
3604 size_int (precision - size)));
3607 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3608 represents the sign bit of EXP's type. If EXP represents a sign
3609 or zero extension, also test VAL against the unextended type.
3610 The return value is the (sub)expression whose sign bit is VAL,
3611 or NULL_TREE otherwise. */
3613 static tree
3614 sign_bit_p (tree exp, const_tree val)
3616 unsigned HOST_WIDE_INT mask_lo, lo;
3617 HOST_WIDE_INT mask_hi, hi;
3618 int width;
3619 tree t;
3621 /* Tree EXP must have an integral type. */
3622 t = TREE_TYPE (exp);
3623 if (! INTEGRAL_TYPE_P (t))
3624 return NULL_TREE;
3626 /* Tree VAL must be an integer constant. */
3627 if (TREE_CODE (val) != INTEGER_CST
3628 || TREE_OVERFLOW (val))
3629 return NULL_TREE;
3631 width = TYPE_PRECISION (t);
3632 if (width > HOST_BITS_PER_WIDE_INT)
3634 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3635 lo = 0;
3637 mask_hi = ((unsigned HOST_WIDE_INT) -1
3638 >> (HOST_BITS_PER_DOUBLE_INT - width));
3639 mask_lo = -1;
3641 else
3643 hi = 0;
3644 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3646 mask_hi = 0;
3647 mask_lo = ((unsigned HOST_WIDE_INT) -1
3648 >> (HOST_BITS_PER_WIDE_INT - width));
3651 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3652 treat VAL as if it were unsigned. */
3653 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3654 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3655 return exp;
3657 /* Handle extension from a narrower type. */
3658 if (TREE_CODE (exp) == NOP_EXPR
3659 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3660 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3662 return NULL_TREE;
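/* Illustration only (hypothetical example, not part of fold-const.c):
   the constant recognized here.  For a 32-bit int the sign bit is
   1 << 31, and a mask test against it is just a sign test (assuming
   two's complement, which GCC guarantees):  */

static int sign_by_mask (int x) { return (x & 0x80000000u) != 0; }
static int sign_by_compare (int x) { return x < 0; }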
3665 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3666 to be evaluated unconditionally. */
3668 static int
3669 simple_operand_p (const_tree exp)
3671 /* Strip any conversions that don't change the machine mode. */
3672 STRIP_NOPS (exp);
3674 return (CONSTANT_CLASS_P (exp)
3675 || TREE_CODE (exp) == SSA_NAME
3676 || (DECL_P (exp)
3677 && ! TREE_ADDRESSABLE (exp)
3678 && ! TREE_THIS_VOLATILE (exp)
3679 && ! DECL_NONLOCAL (exp)
3680 /* Don't regard global variables as simple. They may be
3681 allocated in ways unknown to the compiler (shared memory,
3682 #pragma weak, etc). */
3683 && ! TREE_PUBLIC (exp)
3684 && ! DECL_EXTERNAL (exp)
3685 /* Loading a static variable is unduly expensive, but global
3686 registers aren't expensive. */
3687 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3690 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3691 to be evaluated unconditionally.
3692 In addition to simple_operand_p, we assume that comparisons, conversions,
3693 and logic-not operations are simple, if their operands are simple, too. */
3695 static bool
3696 simple_operand_p_2 (tree exp)
3698 enum tree_code code;
3700 if (TREE_SIDE_EFFECTS (exp)
3701 || tree_could_trap_p (exp))
3702 return false;
3704 while (CONVERT_EXPR_P (exp))
3705 exp = TREE_OPERAND (exp, 0);
3707 code = TREE_CODE (exp);
3709 if (TREE_CODE_CLASS (code) == tcc_comparison)
3710 return (simple_operand_p (TREE_OPERAND (exp, 0))
3711 && simple_operand_p (TREE_OPERAND (exp, 1)));
3713 if (code == TRUTH_NOT_EXPR)
3714 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3716 return simple_operand_p (exp);
3720 /* The following functions are subroutines to fold_range_test and allow it to
3721 try to change a logical combination of comparisons into a range test.
3723 For example, both
3724 X == 2 || X == 3 || X == 4 || X == 5
3725 and
3726 X >= 2 && X <= 5
3727 are converted to
3728 (unsigned) (X - 2) <= 3
3730 We describe each set of comparisons as being either inside or outside
3731 a range, using a variable named like IN_P, and then describe the
3732 range with a lower and upper bound. If one of the bounds is omitted,
3733 it represents either the highest or lowest value of the type.
3735 In the comments below, we represent a range by two numbers in brackets
3736 preceded by a "+" to designate being inside that range, or a "-" to
3737 designate being outside that range, so the condition can be inverted by
3738 flipping the prefix. An omitted bound is represented by a "-". For
3739 example, "- [-, 10]" means being outside the range starting at the lowest
3740 possible value and ending at 10, in other words, being greater than 10.
3741 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3742 always false.
3744 We set up things so that the missing bounds are handled in a
3745 consistent manner, so that neither a missing bound nor "true" and
3746 "false" needs to be handled as a special case. */
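/* Illustrative sketch (not part of GCC): the canonical payoff of the
   range machinery described above.  The inside-range test + [2, 5] can
   be carried out with a single unsigned comparison, because values
   below the low bound wrap around to very large unsigned numbers.  */
static int
example_range_test (int x)
{
  /* Equivalent to (x == 2 || x == 3 || x == 4 || x == 5)
     and to (x >= 2 && x <= 5).  */
  return (unsigned int) x - 2u <= 3u;
}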
3748 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3749 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3750 and UPPER1_P are nonzero if the respective argument is an upper bound
3751 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3752 must be specified for a comparison. ARG1 will be converted to ARG0's
3753 type if both are specified. */
3755 static tree
3756 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3757 tree arg1, int upper1_p)
3759 tree tem;
3760 int result;
3761 int sgn0, sgn1;
3763 /* If neither arg represents infinity, do the normal operation.
3764 Else, if not a comparison, return infinity. Else handle the special
3765 comparison rules. Note that most of the cases below won't occur, but
3766 are handled for consistency. */
3768 if (arg0 != 0 && arg1 != 0)
3770 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3771 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3772 STRIP_NOPS (tem);
3773 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3776 if (TREE_CODE_CLASS (code) != tcc_comparison)
3777 return 0;
3779 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3780 for neither. In real mathematics, we cannot assume open-ended ranges are
3781 the same. But this is computer arithmetic, where numbers are finite.
3782 We can therefore model any missing bound as a value Z greater than
3783 any representable number, which permits us to treat all unbounded
3784 ranges as equal. */
3785 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3786 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3787 switch (code)
3789 case EQ_EXPR:
3790 result = sgn0 == sgn1;
3791 break;
3792 case NE_EXPR:
3793 result = sgn0 != sgn1;
3794 break;
3795 case LT_EXPR:
3796 result = sgn0 < sgn1;
3797 break;
3798 case LE_EXPR:
3799 result = sgn0 <= sgn1;
3800 break;
3801 case GT_EXPR:
3802 result = sgn0 > sgn1;
3803 break;
3804 case GE_EXPR:
3805 result = sgn0 >= sgn1;
3806 break;
3807 default:
3808 gcc_unreachable ();
3811 return constant_boolean_node (result, type);
3814 /* Helper routine for make_range. Perform one step for it, return
3815 new expression if the loop should continue or NULL_TREE if it should
3816 stop. */
3818 tree
3819 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3820 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3821 bool *strict_overflow_p)
3823 tree arg0_type = TREE_TYPE (arg0);
3824 tree n_low, n_high, low = *p_low, high = *p_high;
3825 int in_p = *p_in_p, n_in_p;
3827 switch (code)
3829 case TRUTH_NOT_EXPR:
3830 /* We can only do something if the range is testing for zero. */
3831 if (low == NULL_TREE || high == NULL_TREE
3832 || ! integer_zerop (low) || ! integer_zerop (high))
3833 return NULL_TREE;
3834 *p_in_p = ! in_p;
3835 return arg0;
3837 case EQ_EXPR: case NE_EXPR:
3838 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3839 /* We can only do something if the range is testing for zero
3840 and if the second operand is an integer constant. Note that
3841 saying something is "in" the range we make is done by
3842 complementing IN_P, since IN_P is set for the initial case of
3843 being not equal to zero; "out" means leaving it alone. */
3844 if (low == NULL_TREE || high == NULL_TREE
3845 || ! integer_zerop (low) || ! integer_zerop (high)
3846 || TREE_CODE (arg1) != INTEGER_CST)
3847 return NULL_TREE;
3849 switch (code)
3851 case NE_EXPR: /* - [c, c] */
3852 low = high = arg1;
3853 break;
3854 case EQ_EXPR: /* + [c, c] */
3855 in_p = ! in_p, low = high = arg1;
3856 break;
3857 case GT_EXPR: /* - [-, c] */
3858 low = 0, high = arg1;
3859 break;
3860 case GE_EXPR: /* + [c, -] */
3861 in_p = ! in_p, low = arg1, high = 0;
3862 break;
3863 case LT_EXPR: /* - [c, -] */
3864 low = arg1, high = 0;
3865 break;
3866 case LE_EXPR: /* + [-, c] */
3867 in_p = ! in_p, low = 0, high = arg1;
3868 break;
3869 default:
3870 gcc_unreachable ();
3873 /* If this is an unsigned comparison, we also know that EXP is
3874 greater than or equal to zero. We base the range tests we make
3875 on that fact, so we record it here so we can parse existing
3876 range tests. We test arg0_type since often the return type
3877 of, e.g. EQ_EXPR, is boolean. */
3878 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3880 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3881 in_p, low, high, 1,
3882 build_int_cst (arg0_type, 0),
3883 NULL_TREE))
3884 return NULL_TREE;
3886 in_p = n_in_p, low = n_low, high = n_high;
3888 /* If the high bound is missing, but we have a nonzero low
3889 bound, reverse the range so it goes from zero to the low bound
3890 minus 1. */
3891 if (high == 0 && low && ! integer_zerop (low))
3893 in_p = ! in_p;
3894 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3895 integer_one_node, 0);
3896 low = build_int_cst (arg0_type, 0);
3900 *p_low = low;
3901 *p_high = high;
3902 *p_in_p = in_p;
3903 return arg0;
3905 case NEGATE_EXPR:
3906 /* If flag_wrapv is in effect and ARG0_TYPE is signed, make sure LOW
3907 and HIGH are non-NULL; the normalize step below then does the right thing. */
3908 if (!TYPE_UNSIGNED (arg0_type)
3909 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3911 if (low == NULL_TREE)
3912 low = TYPE_MIN_VALUE (arg0_type);
3913 if (high == NULL_TREE)
3914 high = TYPE_MAX_VALUE (arg0_type);
3917 /* (-x) IN [a,b] -> x in [-b, -a] */
3918 n_low = range_binop (MINUS_EXPR, exp_type,
3919 build_int_cst (exp_type, 0),
3920 0, high, 1);
3921 n_high = range_binop (MINUS_EXPR, exp_type,
3922 build_int_cst (exp_type, 0),
3923 0, low, 0);
3924 if (n_high != 0 && TREE_OVERFLOW (n_high))
3925 return NULL_TREE;
3926 goto normalize;
3928 case BIT_NOT_EXPR:
3929 /* ~ X -> -X - 1 */
3930 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3931 build_int_cst (exp_type, 1));
3933 case PLUS_EXPR:
3934 case MINUS_EXPR:
3935 if (TREE_CODE (arg1) != INTEGER_CST)
3936 return NULL_TREE;
3938 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3939 move a constant to the other side. */
3940 if (!TYPE_UNSIGNED (arg0_type)
3941 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3942 return NULL_TREE;
3944 /* If EXP is signed, any overflow in the computation is undefined,
3945 so we don't worry about it so long as our computations on
3946 the bounds don't overflow. For unsigned, overflow is defined
3947 and this is exactly the right thing. */
3948 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3949 arg0_type, low, 0, arg1, 0);
3950 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3951 arg0_type, high, 1, arg1, 0);
3952 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3953 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3954 return NULL_TREE;
3956 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3957 *strict_overflow_p = true;
3959 normalize:
3960 /* Check for an unsigned range which has wrapped around the maximum
3961 value thus making n_high < n_low, and normalize it. */
3962 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3964 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3965 integer_one_node, 0);
3966 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3967 integer_one_node, 0);
3969 /* If the range is of the form +/- [ x+1, x ], we won't
3970 be able to normalize it. But then, it represents the
3971 whole range or the empty set, so make it
3972 +/- [ -, - ]. */
3973 if (tree_int_cst_equal (n_low, low)
3974 && tree_int_cst_equal (n_high, high))
3975 low = high = 0;
3976 else
3977 in_p = ! in_p;
3979 else
3980 low = n_low, high = n_high;
3982 *p_low = low;
3983 *p_high = high;
3984 *p_in_p = in_p;
3985 return arg0;
3987 CASE_CONVERT:
3988 case NON_LVALUE_EXPR:
3989 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3990 return NULL_TREE;
3992 if (! INTEGRAL_TYPE_P (arg0_type)
3993 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3994 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3995 return NULL_TREE;
3997 n_low = low, n_high = high;
3999 if (n_low != 0)
4000 n_low = fold_convert_loc (loc, arg0_type, n_low);
4002 if (n_high != 0)
4003 n_high = fold_convert_loc (loc, arg0_type, n_high);
4005 /* If we're converting arg0 from an unsigned type to exp,
4006 a signed type, we will be doing the comparison as unsigned.
4007 The tests above have already verified that LOW and HIGH
4008 are both positive.
4010 So we have to ensure that we will handle large unsigned
4011 values the same way that the current signed bounds treat
4012 negative values. */
4014 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4016 tree high_positive;
4017 tree equiv_type;
4018 /* For fixed-point modes, we need to pass the saturating flag
4019 as the 2nd parameter. */
4020 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4021 equiv_type
4022 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4023 TYPE_SATURATING (arg0_type));
4024 else
4025 equiv_type
4026 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4028 /* A range without an upper bound is, naturally, unbounded.
4029 Since convert would have cropped a very large value, use
4030 the max value for the destination type. */
4031 high_positive
4032 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4033 : TYPE_MAX_VALUE (arg0_type);
4035 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4036 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4037 fold_convert_loc (loc, arg0_type,
4038 high_positive),
4039 build_int_cst (arg0_type, 1));
4041 /* If the low bound is specified, "and" the range with the
4042 range for which the original unsigned value will be
4043 positive. */
4044 if (low != 0)
4046 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4047 1, fold_convert_loc (loc, arg0_type,
4048 integer_zero_node),
4049 high_positive))
4050 return NULL_TREE;
4052 in_p = (n_in_p == in_p);
4054 else
4056 /* Otherwise, "or" the range with the range of the input
4057 that will be interpreted as negative. */
4058 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4059 1, fold_convert_loc (loc, arg0_type,
4060 integer_zero_node),
4061 high_positive))
4062 return NULL_TREE;
4064 in_p = (in_p != n_in_p);
4068 *p_low = n_low;
4069 *p_high = n_high;
4070 *p_in_p = in_p;
4071 return arg0;
4073 default:
4074 return NULL_TREE;
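/* Illustrative sketch (not part of GCC): two identities the steps
   above rely on, checked directly in C.  BIT_NOT_EXPR uses
   ~X == -X - 1 (exact in unsigned arithmetic), and the "normalize"
   step inverts a range whose bounds wrapped around the top of the
   type.  */
static int
example_make_range_step_identities (unsigned int x, unsigned char u)
{
  /* ~X -> -X - 1, the BIT_NOT_EXPR rewrite.  */
  int bit_not_ok = (~x == -x - 1u);
  /* U + 10 in + [5, 250] gives U in [251, 240], which has wrapped;
     the normalized form is the outside range - [241, 250].  */
  unsigned char t = (unsigned char) (u + 10);
  int wrap_ok = ((t >= 5 && t <= 250) == ! (u >= 241 && u <= 250));
  return bit_not_ok && wrap_ok;		/* Expected: 1 for all inputs.  */
}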
4078 /* Given EXP, a logical expression, set the range it is testing into
4079 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4080 actually being tested. *PLOW and *PHIGH will be made of the same
4081 type as the returned expression. If EXP is not a comparison, we
4082 will most likely not be returning a useful value and range. Set
4083 *STRICT_OVERFLOW_P to true if the return value is only valid
4084 because signed overflow is undefined; otherwise, do not change
4085 *STRICT_OVERFLOW_P. */
4087 tree
4088 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4089 bool *strict_overflow_p)
4091 enum tree_code code;
4092 tree arg0, arg1 = NULL_TREE;
4093 tree exp_type, nexp;
4094 int in_p;
4095 tree low, high;
4096 location_t loc = EXPR_LOCATION (exp);
4098 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4099 and see if we can refine the range. Some of the cases below may not
4100 happen, but it doesn't seem worth worrying about this. We keep
4101 iterating as long as make_range_step refines the range further;
4102 once it returns NULL_TREE, we stop. */
4104 in_p = 0;
4105 low = high = build_int_cst (TREE_TYPE (exp), 0);
4107 while (1)
4109 code = TREE_CODE (exp);
4110 exp_type = TREE_TYPE (exp);
4111 arg0 = NULL_TREE;
4113 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4115 if (TREE_OPERAND_LENGTH (exp) > 0)
4116 arg0 = TREE_OPERAND (exp, 0);
4117 if (TREE_CODE_CLASS (code) == tcc_binary
4118 || TREE_CODE_CLASS (code) == tcc_comparison
4119 || (TREE_CODE_CLASS (code) == tcc_expression
4120 && TREE_OPERAND_LENGTH (exp) > 1))
4121 arg1 = TREE_OPERAND (exp, 1);
4123 if (arg0 == NULL_TREE)
4124 break;
4126 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4127 &high, &in_p, strict_overflow_p);
4128 if (nexp == NULL_TREE)
4129 break;
4130 exp = nexp;
4133 /* If EXP is a constant, we can evaluate whether this is true or false. */
4134 if (TREE_CODE (exp) == INTEGER_CST)
4136 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4137 exp, 0, low, 0))
4138 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4139 exp, 1, high, 1)));
4140 low = high = 0;
4141 exp = 0;
4144 *pin_p = in_p, *plow = low, *phigh = high;
4145 return exp;
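/* Illustrative walk-through (not part of GCC): for EXP = (-x < 3),
   make_range first peels the comparison, giving -X in - [3, -], and
   the NEGATE_EXPR step then negates the bounds, giving X in
   - [-, -3], i.e. X >= -2.  The standalone check below assumes
   X != INT_MIN so that -X does not overflow.  */
static int
example_make_range_negate (int x)
{
  return (-x < 3) == (x >= -2);		/* Expected: 1 for x > INT_MIN.  */
}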
4148 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4149 type, TYPE, return an expression to test if EXP is in (or out of, depending
4150 on IN_P) the range. Return 0 if the test couldn't be created. */
4152 tree
4153 build_range_check (location_t loc, tree type, tree exp, int in_p,
4154 tree low, tree high)
4156 tree etype = TREE_TYPE (exp), value;
4158 #ifdef HAVE_canonicalize_funcptr_for_compare
4159 /* Disable this optimization for function pointer expressions
4160 on targets that require function pointer canonicalization. */
4161 if (HAVE_canonicalize_funcptr_for_compare
4162 && TREE_CODE (etype) == POINTER_TYPE
4163 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4164 return NULL_TREE;
4165 #endif
4167 if (! in_p)
4169 value = build_range_check (loc, type, exp, 1, low, high);
4170 if (value != 0)
4171 return invert_truthvalue_loc (loc, value);
4173 return 0;
4176 if (low == 0 && high == 0)
4177 return build_int_cst (type, 1);
4179 if (low == 0)
4180 return fold_build2_loc (loc, LE_EXPR, type, exp,
4181 fold_convert_loc (loc, etype, high));
4183 if (high == 0)
4184 return fold_build2_loc (loc, GE_EXPR, type, exp,
4185 fold_convert_loc (loc, etype, low));
4187 if (operand_equal_p (low, high, 0))
4188 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4189 fold_convert_loc (loc, etype, low));
4191 if (integer_zerop (low))
4193 if (! TYPE_UNSIGNED (etype))
4195 etype = unsigned_type_for (etype);
4196 high = fold_convert_loc (loc, etype, high);
4197 exp = fold_convert_loc (loc, etype, exp);
4199 return build_range_check (loc, type, exp, 1, 0, high);
4202 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4203 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4205 unsigned HOST_WIDE_INT lo;
4206 HOST_WIDE_INT hi;
4207 int prec;
4209 prec = TYPE_PRECISION (etype);
4210 if (prec <= HOST_BITS_PER_WIDE_INT)
4212 hi = 0;
4213 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4215 else
4217 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4218 lo = (unsigned HOST_WIDE_INT) -1;
4221 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4223 if (TYPE_UNSIGNED (etype))
4225 tree signed_etype = signed_type_for (etype);
4226 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4227 etype
4228 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4229 else
4230 etype = signed_etype;
4231 exp = fold_convert_loc (loc, etype, exp);
4233 return fold_build2_loc (loc, GT_EXPR, type, exp,
4234 build_int_cst (etype, 0));
4238 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4239 This requires wrap-around arithmetic for the type of the expression.
4240 First make sure that arithmetic in this type is valid, then make sure
4241 that it wraps around. */
4242 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4243 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4244 TYPE_UNSIGNED (etype));
4246 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4248 tree utype, minv, maxv;
4250 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4251 for the type in question, as we rely on this here. */
4252 utype = unsigned_type_for (etype);
4253 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4254 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4255 integer_one_node, 1);
4256 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4258 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4259 minv, 1, maxv, 1)))
4260 etype = utype;
4261 else
4262 return 0;
4265 high = fold_convert_loc (loc, etype, high);
4266 low = fold_convert_loc (loc, etype, low);
4267 exp = fold_convert_loc (loc, etype, exp);
4269 value = const_binop (MINUS_EXPR, high, low);
4272 if (POINTER_TYPE_P (etype))
4274 if (value != 0 && !TREE_OVERFLOW (value))
4276 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4277 return build_range_check (loc, type,
4278 fold_build_pointer_plus_loc (loc, exp, low),
4279 1, build_int_cst (etype, 0), value);
4281 return 0;
4284 if (value != 0 && !TREE_OVERFLOW (value))
4285 return build_range_check (loc, type,
4286 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4287 1, build_int_cst (etype, 0), value);
4289 return 0;
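/* Illustrative sketch (not part of GCC) of two rewrites above: the
   biased unsigned comparison for a general + [low, high] range, and
   the "(c >= 1) && (c <= 127) into (signed char) c > 0" special case,
   assuming the usual two's-complement narrowing conversion.  */
static int
example_build_range_check (unsigned char c)
{
  int biased = ((c >= 1 && c <= 127)
		== ((unsigned char) (c - 1) <= 126));
  int signtest = ((c >= 1 && c <= 127)
		  == ((signed char) c > 0));
  return biased && signtest;		/* Expected: 1 for all C.  */
}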
4292 /* Return the predecessor of VAL in its type, handling the infinite case. */
4294 static tree
4295 range_predecessor (tree val)
4297 tree type = TREE_TYPE (val);
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4301 return 0;
4302 else
4303 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4306 /* Return the successor of VAL in its type, handling the infinite case. */
4308 static tree
4309 range_successor (tree val)
4311 tree type = TREE_TYPE (val);
4313 if (INTEGRAL_TYPE_P (type)
4314 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4315 return 0;
4316 else
4317 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
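/* Illustrative sketch (not part of GCC): at the extremes of the type
   there is no successor or predecessor, and the callers below punt
   rather than wrap.  An 8-bit unsigned analogue of range_successor:  */
static int
example_range_successor (unsigned char v, int *valid)
{
  if (v == 255)				/* TYPE_MAX_VALUE: punt.  */
    {
      *valid = 0;
      return 0;
    }
  *valid = 1;
  return v + 1;
}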
4320 /* Given two ranges, see if we can merge them into one. Return 1 if we
4321 can, 0 if we can't. Set the output range into the specified parameters. */
4323 bool
4324 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4325 tree high0, int in1_p, tree low1, tree high1)
4327 int no_overlap;
4328 int subset;
4329 int temp;
4330 tree tem;
4331 int in_p;
4332 tree low, high;
4333 int lowequal = ((low0 == 0 && low1 == 0)
4334 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4335 low0, 0, low1, 0)));
4336 int highequal = ((high0 == 0 && high1 == 0)
4337 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4338 high0, 1, high1, 1)));
4340 /* Make range 0 be the range that starts first, or ends last if they
4341 start at the same value. Swap them if it isn't. */
4342 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4343 low0, 0, low1, 0))
4344 || (lowequal
4345 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4346 high1, 1, high0, 1))))
4348 temp = in0_p, in0_p = in1_p, in1_p = temp;
4349 tem = low0, low0 = low1, low1 = tem;
4350 tem = high0, high0 = high1, high1 = tem;
4353 /* Now flag two cases, whether the ranges are disjoint or whether the
4354 second range is totally subsumed in the first. Note that the tests
4355 below are simplified by the ones above. */
4356 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4357 high0, 1, low1, 0));
4358 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4359 high1, 1, high0, 1));
4361 /* We now have four cases, depending on whether we are including or
4362 excluding the two ranges. */
4363 if (in0_p && in1_p)
4365 /* If they don't overlap, the result is false. If the second range
4366 is a subset it is the result. Otherwise, the range is from the start
4367 of the second to the end of the first. */
4368 if (no_overlap)
4369 in_p = 0, low = high = 0;
4370 else if (subset)
4371 in_p = 1, low = low1, high = high1;
4372 else
4373 in_p = 1, low = low1, high = high0;
4376 else if (in0_p && ! in1_p)
4378 /* If they don't overlap, the result is the first range. If they are
4379 equal, the result is false. If the second range is a subset of the
4380 first, and the ranges begin at the same place, we go from just after
4381 the end of the second range to the end of the first. If the second
4382 range is not a subset of the first, or if it is a subset and both
4383 ranges end at the same place, the range starts at the start of the
4384 first range and ends just before the second range.
4385 Otherwise, we can't describe this as a single range. */
4386 if (no_overlap)
4387 in_p = 1, low = low0, high = high0;
4388 else if (lowequal && highequal)
4389 in_p = 0, low = high = 0;
4390 else if (subset && lowequal)
4392 low = range_successor (high1);
4393 high = high0;
4394 in_p = 1;
4395 if (low == 0)
4397 /* We are in the weird situation where high0 > high1 but
4398 high1 has no successor. Punt. */
4399 return 0;
4402 else if (! subset || highequal)
4404 low = low0;
4405 high = range_predecessor (low1);
4406 in_p = 1;
4407 if (high == 0)
4409 /* low0 < low1 but low1 has no predecessor. Punt. */
4410 return 0;
4413 else
4414 return 0;
4417 else if (! in0_p && in1_p)
4419 /* If they don't overlap, the result is the second range. If the second
4420 is a subset of the first, the result is false. Otherwise,
4421 the range starts just after the first range and ends at the
4422 end of the second. */
4423 if (no_overlap)
4424 in_p = 1, low = low1, high = high1;
4425 else if (subset || highequal)
4426 in_p = 0, low = high = 0;
4427 else
4429 low = range_successor (high0);
4430 high = high1;
4431 in_p = 1;
4432 if (low == 0)
4434 /* high1 > high0 but high0 has no successor. Punt. */
4435 return 0;
4440 else
4442 /* The case where we are excluding both ranges. Here the complex case
4443 is if they don't overlap. In that case, the only time we have a
4444 range is if they are adjacent. If the second is a subset of the
4445 first, the result is the first. Otherwise, the range to exclude
4446 starts at the beginning of the first range and ends at the end of the
4447 second. */
4448 if (no_overlap)
4450 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4451 range_successor (high0),
4452 1, low1, 0)))
4453 in_p = 0, low = low0, high = high1;
4454 else
4456 /* Canonicalize - [min, x] into - [-, x]. */
4457 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4458 switch (TREE_CODE (TREE_TYPE (low0)))
4460 case ENUMERAL_TYPE:
4461 if (TYPE_PRECISION (TREE_TYPE (low0))
4462 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4463 break;
4464 /* FALLTHROUGH */
4465 case INTEGER_TYPE:
4466 if (tree_int_cst_equal (low0,
4467 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4468 low0 = 0;
4469 break;
4470 case POINTER_TYPE:
4471 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4472 && integer_zerop (low0))
4473 low0 = 0;
4474 break;
4475 default:
4476 break;
4479 /* Canonicalize - [x, max] into - [x, -]. */
4480 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4481 switch (TREE_CODE (TREE_TYPE (high1)))
4483 case ENUMERAL_TYPE:
4484 if (TYPE_PRECISION (TREE_TYPE (high1))
4485 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4486 break;
4487 /* FALLTHROUGH */
4488 case INTEGER_TYPE:
4489 if (tree_int_cst_equal (high1,
4490 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4491 high1 = 0;
4492 break;
4493 case POINTER_TYPE:
4494 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4495 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4496 high1, 1,
4497 integer_one_node, 1)))
4498 high1 = 0;
4499 break;
4500 default:
4501 break;
4504 /* The ranges might be also adjacent between the maximum and
4505 minimum values of the given type. For
4506 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4507 return + [x + 1, y - 1]. */
4508 if (low0 == 0 && high1 == 0)
4510 low = range_successor (high0);
4511 high = range_predecessor (low1);
4512 if (low == 0 || high == 0)
4513 return 0;
4515 in_p = 1;
4517 else
4518 return 0;
4521 else if (subset)
4522 in_p = 0, low = low0, high = high0;
4523 else
4524 in_p = 0, low = low0, high = high1;
4527 *pin_p = in_p, *plow = low, *phigh = high;
4528 return 1;
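/* Illustrative sketch (not part of GCC): one merge_ranges case from
   the exclusion branch above.  Excluding both - [-, 4] and - [10, -]
   leaves the adjacent inside range + [5, 9], which again folds to a
   single biased unsigned comparison.  */
static int
example_merge_ranges (int x)
{
  return (!(x <= 4) && !(x >= 10)) == ((unsigned int) x - 5u <= 4u);
}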
4532 /* Subroutine of fold, looking inside expressions of the form
4533 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4534 of the COND_EXPR. This function is being used also to optimize
4535 A op B ? C : A, by reversing the comparison first.
4537 Return a folded expression whose code is not a COND_EXPR
4538 anymore, or NULL_TREE if no folding opportunity is found. */
4540 static tree
4541 fold_cond_expr_with_comparison (location_t loc, tree type,
4542 tree arg0, tree arg1, tree arg2)
4544 enum tree_code comp_code = TREE_CODE (arg0);
4545 tree arg00 = TREE_OPERAND (arg0, 0);
4546 tree arg01 = TREE_OPERAND (arg0, 1);
4547 tree arg1_type = TREE_TYPE (arg1);
4548 tree tem;
4550 STRIP_NOPS (arg1);
4551 STRIP_NOPS (arg2);
4553 /* If we have A op 0 ? A : -A, consider applying the following
4554 transformations:
4556 A == 0? A : -A same as -A
4557 A != 0? A : -A same as A
4558 A >= 0? A : -A same as abs (A)
4559 A > 0? A : -A same as abs (A)
4560 A <= 0? A : -A same as -abs (A)
4561 A < 0? A : -A same as -abs (A)
4563 None of these transformations work for modes with signed
4564 zeros. If A is +/-0, the first two transformations will
4565 change the sign of the result (from +0 to -0, or vice
4566 versa). The last four will fix the sign of the result,
4567 even though the original expressions could be positive or
4568 negative, depending on the sign of A.
4570 Note that all these transformations are correct if A is
4571 NaN, since the two alternatives (A and -A) are also NaNs. */
4572 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4573 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4574 ? real_zerop (arg01)
4575 : integer_zerop (arg01))
4576 && ((TREE_CODE (arg2) == NEGATE_EXPR
4577 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4578 /* In the case that A is of the form X-Y, '-A' (arg2) may
4579 have already been folded to Y-X, check for that. */
4580 || (TREE_CODE (arg1) == MINUS_EXPR
4581 && TREE_CODE (arg2) == MINUS_EXPR
4582 && operand_equal_p (TREE_OPERAND (arg1, 0),
4583 TREE_OPERAND (arg2, 1), 0)
4584 && operand_equal_p (TREE_OPERAND (arg1, 1),
4585 TREE_OPERAND (arg2, 0), 0))))
4586 switch (comp_code)
4588 case EQ_EXPR:
4589 case UNEQ_EXPR:
4590 tem = fold_convert_loc (loc, arg1_type, arg1);
4591 return pedantic_non_lvalue_loc (loc,
4592 fold_convert_loc (loc, type,
4593 negate_expr (tem)));
4594 case NE_EXPR:
4595 case LTGT_EXPR:
4596 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4597 case UNGE_EXPR:
4598 case UNGT_EXPR:
4599 if (flag_trapping_math)
4600 break;
4601 /* Fall through. */
4602 case GE_EXPR:
4603 case GT_EXPR:
4604 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4605 arg1 = fold_convert_loc (loc, signed_type_for
4606 (TREE_TYPE (arg1)), arg1);
4607 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4609 case UNLE_EXPR:
4610 case UNLT_EXPR:
4611 if (flag_trapping_math)
4612 break;
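/* Fall through. */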
4613 case LE_EXPR:
4614 case LT_EXPR:
4615 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4616 arg1 = fold_convert_loc (loc, signed_type_for
4617 (TREE_TYPE (arg1)), arg1);
4618 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4619 return negate_expr (fold_convert_loc (loc, type, tem));
4620 default:
4621 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4622 break;
4625 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4626 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4627 both transformations are correct when A is NaN: A != 0
4628 is then true, and A == 0 is false. */
4630 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4631 && integer_zerop (arg01) && integer_zerop (arg2))
4633 if (comp_code == NE_EXPR)
4634 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4635 else if (comp_code == EQ_EXPR)
4636 return build_zero_cst (type);
4639 /* Try some transformations of A op B ? A : B.
4641 A == B? A : B same as B
4642 A != B? A : B same as A
4643 A >= B? A : B same as max (A, B)
4644 A > B? A : B same as max (B, A)
4645 A <= B? A : B same as min (A, B)
4646 A < B? A : B same as min (B, A)
4648 As above, these transformations don't work in the presence
4649 of signed zeros. For example, if A and B are zeros of
4650 opposite sign, the first two transformations will change
4651 the sign of the result. In the last four, the original
4652 expressions give different results for (A=+0, B=-0) and
4653 (A=-0, B=+0), but the transformed expressions do not.
4655 The first two transformations are correct if either A or B
4656 is a NaN. In the first transformation, the condition will
4657 be false, and B will indeed be chosen. In the case of the
4658 second transformation, the condition A != B will be true,
4659 and A will be chosen.
4661 The conversions to max() and min() are not correct if B is
4662 a number and A is not. The conditions in the original
4663 expressions will be false, so all four give B. The min()
4664 and max() versions would give a NaN instead. */
4665 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4666 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4667 /* Avoid these transformations if the COND_EXPR may be used
4668 as an lvalue in the C++ front-end. PR c++/19199. */
4669 && (in_gimple_form
4670 || VECTOR_TYPE_P (type)
4671 || (strcmp (lang_hooks.name, "GNU C++") != 0
4672 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4673 || ! maybe_lvalue_p (arg1)
4674 || ! maybe_lvalue_p (arg2)))
4676 tree comp_op0 = arg00;
4677 tree comp_op1 = arg01;
4678 tree comp_type = TREE_TYPE (comp_op0);
4680 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4681 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4683 comp_type = type;
4684 comp_op0 = arg1;
4685 comp_op1 = arg2;
4688 switch (comp_code)
4690 case EQ_EXPR:
4691 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4692 case NE_EXPR:
4693 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4694 case LE_EXPR:
4695 case LT_EXPR:
4696 case UNLE_EXPR:
4697 case UNLT_EXPR:
4698 /* In C++ a ?: expression can be an lvalue, so put the
4699 operand which will be used if they are equal first
4700 so that we can convert this back to the
4701 corresponding COND_EXPR. */
4702 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4704 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4705 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4706 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4707 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4708 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4709 comp_op1, comp_op0);
4710 return pedantic_non_lvalue_loc (loc,
4711 fold_convert_loc (loc, type, tem));
4713 break;
4714 case GE_EXPR:
4715 case GT_EXPR:
4716 case UNGE_EXPR:
4717 case UNGT_EXPR:
4718 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4720 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4721 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4722 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4723 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4724 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4725 comp_op1, comp_op0);
4726 return pedantic_non_lvalue_loc (loc,
4727 fold_convert_loc (loc, type, tem));
4729 break;
4730 case UNEQ_EXPR:
4731 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4732 return pedantic_non_lvalue_loc (loc,
4733 fold_convert_loc (loc, type, arg2));
4734 break;
4735 case LTGT_EXPR:
4736 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, arg1));
4739 break;
4740 default:
4741 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4742 break;
4746 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4747 we might still be able to simplify this. For example,
4748 if C1 is one less or one more than C2, this might have started
4749 out as a MIN or MAX and been transformed by this function.
4750 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4752 if (INTEGRAL_TYPE_P (type)
4753 && TREE_CODE (arg01) == INTEGER_CST
4754 && TREE_CODE (arg2) == INTEGER_CST)
4755 switch (comp_code)
4757 case EQ_EXPR:
4758 if (TREE_CODE (arg1) == INTEGER_CST)
4759 break;
4760 /* We can replace A with C1 in this case. */
4761 arg1 = fold_convert_loc (loc, type, arg01);
4762 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4764 case LT_EXPR:
4765 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4766 MIN_EXPR, to preserve the signedness of the comparison. */
4767 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4768 OEP_ONLY_CONST)
4769 && operand_equal_p (arg01,
4770 const_binop (PLUS_EXPR, arg2,
4771 build_int_cst (type, 1)),
4772 OEP_ONLY_CONST))
4774 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4775 fold_convert_loc (loc, TREE_TYPE (arg00),
4776 arg2));
4777 return pedantic_non_lvalue_loc (loc,
4778 fold_convert_loc (loc, type, tem));
4780 break;
4782 case LE_EXPR:
4783 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4784 as above. */
4785 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4786 OEP_ONLY_CONST)
4787 && operand_equal_p (arg01,
4788 const_binop (MINUS_EXPR, arg2,
4789 build_int_cst (type, 1)),
4790 OEP_ONLY_CONST))
4792 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4793 fold_convert_loc (loc, TREE_TYPE (arg00),
4794 arg2));
4795 return pedantic_non_lvalue_loc (loc,
4796 fold_convert_loc (loc, type, tem));
4798 break;
4800 case GT_EXPR:
4801 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4802 MAX_EXPR, to preserve the signedness of the comparison. */
4803 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4804 OEP_ONLY_CONST)
4805 && operand_equal_p (arg01,
4806 const_binop (MINUS_EXPR, arg2,
4807 build_int_cst (type, 1)),
4808 OEP_ONLY_CONST))
4810 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4811 fold_convert_loc (loc, TREE_TYPE (arg00),
4812 arg2));
4813 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4815 break;
4817 case GE_EXPR:
4818 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4819 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4820 OEP_ONLY_CONST)
4821 && operand_equal_p (arg01,
4822 const_binop (PLUS_EXPR, arg2,
4823 build_int_cst (type, 1)),
4824 OEP_ONLY_CONST))
4826 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4827 fold_convert_loc (loc, TREE_TYPE (arg00),
4828 arg2));
4829 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4831 break;
4832 case NE_EXPR:
4833 break;
4834 default:
4835 gcc_unreachable ();
4838 return NULL_TREE;
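/* Illustrative sketch (not part of GCC): why the A < B ? A : B -> MIN
   rewrite above is fenced off by HONOR_NANS.  With doubles and B a
   NaN, the comparison is false and the conditional yields B, while a
   number-preferring min would yield A.  For integers the identity is
   unconditional:  */
static int
example_cond_to_min (int a, int b)
{
  int cond = a < b ? a : b;
  int min = b < a ? b : a;		/* MIN_EXPR semantics.  */
  return cond == min;			/* Expected: 1 for all inputs.  */
}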
4843 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4844 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4845 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4846 false) >= 2)
4847 #endif
4849 /* EXP is some logical combination of boolean tests. See if we can
4850 merge it into some range test. Return the new tree if so. */
4852 static tree
4853 fold_range_test (location_t loc, enum tree_code code, tree type,
4854 tree op0, tree op1)
4856 int or_op = (code == TRUTH_ORIF_EXPR
4857 || code == TRUTH_OR_EXPR);
4858 int in0_p, in1_p, in_p;
4859 tree low0, low1, low, high0, high1, high;
4860 bool strict_overflow_p = false;
4861 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4862 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4863 tree tem;
4864 const char * const warnmsg = G_("assuming signed overflow does not occur "
4865 "when simplifying range test");
4867 /* If this is an OR operation, invert both sides; we will invert
4868 again at the end. */
4869 if (or_op)
4870 in0_p = ! in0_p, in1_p = ! in1_p;
4872 /* If both expressions are the same, if we can merge the ranges, and we
4873 can build the range test, return it or it inverted. If one of the
4874 ranges is always true or always false, consider it to be the same
4875 expression as the other. */
4876 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4877 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4878 in1_p, low1, high1)
4879 && 0 != (tem = (build_range_check (loc, type,
4880 lhs != 0 ? lhs
4881 : rhs != 0 ? rhs : integer_zero_node,
4882 in_p, low, high))))
4884 if (strict_overflow_p)
4885 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4886 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4889 /* On machines where the branch cost is expensive, if this is a
4890 short-circuited branch and the underlying object on both sides
4891 is the same, make a non-short-circuit operation. */
4892 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4893 && lhs != 0 && rhs != 0
4894 && (code == TRUTH_ANDIF_EXPR
4895 || code == TRUTH_ORIF_EXPR)
4896 && operand_equal_p (lhs, rhs, 0))
4898 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4899 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4900 which cases we can't do this. */
4901 if (simple_operand_p (lhs))
4902 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4903 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4904 type, op0, op1);
4906 else if (!lang_hooks.decls.global_bindings_p ()
4907 && !CONTAINS_PLACEHOLDER_P (lhs))
4909 tree common = save_expr (lhs);
4911 if (0 != (lhs = build_range_check (loc, type, common,
4912 or_op ? ! in0_p : in0_p,
4913 low0, high0))
4914 && (0 != (rhs = build_range_check (loc, type, common,
4915 or_op ? ! in1_p : in1_p,
4916 low1, high1))))
4918 if (strict_overflow_p)
4919 fold_overflow_warning (warnmsg,
4920 WARN_STRICT_OVERFLOW_COMPARISON);
4921 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4922 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4923 type, lhs, rhs);
4928 return 0;
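/* Illustrative sketch (not part of GCC): the classic payoff of
   fold_range_test.  Both arms of the && test the same simple operand,
   so the short circuit can be dropped and the pair merged into one
   biased unsigned comparison.  */
static int
example_digit_range_test (char ch)
{
  return (ch >= '0' && ch <= '9')
	 == ((unsigned int) ch - '0' <= 9u);
}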
4931 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4932 P-bit value. Arrange things so the extra bits will be set to zero if and
4933 only if C is sign-extended to its full width. If MASK is nonzero,
4934 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4936 static tree
4937 unextend (tree c, int p, int unsignedp, tree mask)
4939 tree type = TREE_TYPE (c);
4940 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4941 tree temp;
4943 if (p == modesize || unsignedp)
4944 return c;
4946 /* We work by getting just the sign bit into the low-order bit, then
4947 into the high-order bit, then sign-extend. We then XOR that value
4948 with C. */
4949 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4950 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4952 /* We must use a signed type in order to get an arithmetic right shift.
4953 However, we must also avoid introducing accidental overflows, so that
4954 a subsequent call to integer_zerop will work. Hence we must
4955 do the type conversion here. At this point, the constant is either
4956 zero or one, and the conversion to a signed type can never overflow.
4957 We could get an overflow if this conversion is done anywhere else. */
4958 if (TYPE_UNSIGNED (type))
4959 temp = fold_convert (signed_type_for (type), temp);
4961 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4962 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4963 if (mask != 0)
4964 temp = const_binop (BIT_AND_EXPR, temp,
4965 fold_convert (TREE_TYPE (c), mask));
4966 /* If necessary, convert the type back to match the type of C. */
4967 if (TYPE_UNSIGNED (type))
4968 temp = fold_convert (type, temp);
4970 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
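/* Illustrative sketch (not part of GCC): the shifting above is the
   standard trick for sign-extending a P-bit field.  Assuming 32-bit
   int, an arithmetic right shift on signed values, and two's
   complement (all true for GCC targets), moving the field to the top
   and shifting back replicates its sign bit.  */
static int
example_sign_extend_field (unsigned int c, int p)
{
  /* Requires 0 < p < 32.  */
  return (int) (c << (32 - p)) >> (32 - p);
}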
4973 /* For an expression that has the form
4974 (A && B) || ~B
4976 (A || B) && ~B,
4977 we can drop one of the inner expressions and simplify to
4978 A || ~B
4980 A && ~B
4981 LOC is the location of the resulting expression. OP is the inner
4982 logical operation; the left-hand side in the examples above, while CMPOP
4983 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4984 removing a condition that guards another, as in
4985 (A != NULL && A->...) || A == NULL
4986 which we must not transform. If RHS_ONLY is true, only eliminate the
4987 right-most operand of the inner logical operation. */
4989 static tree
4990 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4991 bool rhs_only)
4993 tree type = TREE_TYPE (cmpop);
4994 enum tree_code code = TREE_CODE (cmpop);
4995 enum tree_code truthop_code = TREE_CODE (op);
4996 tree lhs = TREE_OPERAND (op, 0);
4997 tree rhs = TREE_OPERAND (op, 1);
4998 tree orig_lhs = lhs, orig_rhs = rhs;
4999 enum tree_code rhs_code = TREE_CODE (rhs);
5000 enum tree_code lhs_code = TREE_CODE (lhs);
5001 enum tree_code inv_code;
5003 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5004 return NULL_TREE;
5006 if (TREE_CODE_CLASS (code) != tcc_comparison)
5007 return NULL_TREE;
5009 if (rhs_code == truthop_code)
5011 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5012 if (newrhs != NULL_TREE)
5014 rhs = newrhs;
5015 rhs_code = TREE_CODE (rhs);
5018 if (lhs_code == truthop_code && !rhs_only)
5020 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5021 if (newlhs != NULL_TREE)
5023 lhs = newlhs;
5024 lhs_code = TREE_CODE (lhs);
5028 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5029 if (inv_code == rhs_code
5030 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5031 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5032 return lhs;
5033 if (!rhs_only && inv_code == lhs_code
5034 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5035 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5036 return rhs;
5037 if (rhs != orig_rhs || lhs != orig_lhs)
5038 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5039 lhs, rhs);
5040 return NULL_TREE;
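/* Illustrative sketch (not part of GCC): the identity above, plus the
   guard case that RHS_ONLY exists to protect.  (A && B) || !B may drop
   the inner B, but in "(p != NULL && p->x) || p == NULL" the first
   comparison guards the dereference, so only the right-most operand of
   the inner operation may be removed.  */
static int
example_merge_opposite_arm (int a, int b)
{
  /* Whenever B holds the left arm reduces to A; whenever B fails,
     !B already makes the result true.  */
  return ((a && b) || !b) == (a || !b);	/* Expected: 1.  */
}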
5043 /* Find ways of folding logical expressions of LHS and RHS:
5044 Try to merge two comparisons to the same innermost item.
5045 Look for range tests like "ch >= '0' && ch <= '9'".
5046 Look for combinations of simple terms on machines with expensive branches
5047 and evaluate the RHS unconditionally.
5049 For example, if we have p->a == 2 && p->b == 4 and we can make an
5050 object large enough to span both A and B, we can do this with a comparison
5051 against the object ANDed with the a mask.
5053 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5054 operations to do this with one comparison.
5056 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5057 function and the one above.
5059 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5060 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5062 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5063 two operands.
5065 We return the simplified tree or 0 if no optimization is possible. */
5067 static tree
5068 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5069 tree lhs, tree rhs)
5071 /* If this is the "or" of two comparisons, we can do something if
5072 the comparisons are NE_EXPR. If this is the "and", we can do something
5073 if the comparisons are EQ_EXPR. I.e.,
5074 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5076 WANTED_CODE is this operation code. For single bit fields, we can
5077 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5078 comparison for one-bit fields. */
5080 enum tree_code wanted_code;
5081 enum tree_code lcode, rcode;
5082 tree ll_arg, lr_arg, rl_arg, rr_arg;
5083 tree ll_inner, lr_inner, rl_inner, rr_inner;
5084 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5085 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5086 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5087 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5088 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5089 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5090 enum machine_mode lnmode, rnmode;
5091 tree ll_mask, lr_mask, rl_mask, rr_mask;
5092 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5093 tree l_const, r_const;
5094 tree lntype, rntype, result;
5095 HOST_WIDE_INT first_bit, end_bit;
5096 int volatilep;
5098 /* Start by getting the comparison codes. Fail if anything is volatile.
5099 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5100 it were surrounded with a NE_EXPR. */
5102 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5103 return 0;
5105 lcode = TREE_CODE (lhs);
5106 rcode = TREE_CODE (rhs);
5108 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5110 lhs = build2 (NE_EXPR, truth_type, lhs,
5111 build_int_cst (TREE_TYPE (lhs), 0));
5112 lcode = NE_EXPR;
5115 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5117 rhs = build2 (NE_EXPR, truth_type, rhs,
5118 build_int_cst (TREE_TYPE (rhs), 0));
5119 rcode = NE_EXPR;
5122 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5123 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5124 return 0;
5126 ll_arg = TREE_OPERAND (lhs, 0);
5127 lr_arg = TREE_OPERAND (lhs, 1);
5128 rl_arg = TREE_OPERAND (rhs, 0);
5129 rr_arg = TREE_OPERAND (rhs, 1);
5131 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5132 if (simple_operand_p (ll_arg)
5133 && simple_operand_p (lr_arg))
5135 if (operand_equal_p (ll_arg, rl_arg, 0)
5136 && operand_equal_p (lr_arg, rr_arg, 0))
5138 result = combine_comparisons (loc, code, lcode, rcode,
5139 truth_type, ll_arg, lr_arg);
5140 if (result)
5141 return result;
5143 else if (operand_equal_p (ll_arg, rr_arg, 0)
5144 && operand_equal_p (lr_arg, rl_arg, 0))
5146 result = combine_comparisons (loc, code, lcode,
5147 swap_tree_comparison (rcode),
5148 truth_type, ll_arg, lr_arg);
5149 if (result)
5150 return result;
5154 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5155 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5157 /* If the RHS can be evaluated unconditionally and its operands are
5158 simple, it wins to evaluate the RHS unconditionally on machines
5159 with expensive branches. In this case, this isn't a comparison
5160 that can be merged. */
5162 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5163 false) >= 2
5164 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5165 && simple_operand_p (rl_arg)
5166 && simple_operand_p (rr_arg))
5168 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5169 if (code == TRUTH_OR_EXPR
5170 && lcode == NE_EXPR && integer_zerop (lr_arg)
5171 && rcode == NE_EXPR && integer_zerop (rr_arg)
5172 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5173 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5174 return build2_loc (loc, NE_EXPR, truth_type,
5175 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5176 ll_arg, rl_arg),
5177 build_int_cst (TREE_TYPE (ll_arg), 0));
5179 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5180 if (code == TRUTH_AND_EXPR
5181 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5182 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5183 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5184 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5185 return build2_loc (loc, EQ_EXPR, truth_type,
5186 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5187 ll_arg, rl_arg),
5188 build_int_cst (TREE_TYPE (ll_arg), 0));
5191 /* See if the comparisons can be merged. Then get all the parameters for
5192 each side. */
5194 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5195 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5196 return 0;
5198 volatilep = 0;
5199 ll_inner = decode_field_reference (loc, ll_arg,
5200 &ll_bitsize, &ll_bitpos, &ll_mode,
5201 &ll_unsignedp, &volatilep, &ll_mask,
5202 &ll_and_mask);
5203 lr_inner = decode_field_reference (loc, lr_arg,
5204 &lr_bitsize, &lr_bitpos, &lr_mode,
5205 &lr_unsignedp, &volatilep, &lr_mask,
5206 &lr_and_mask);
5207 rl_inner = decode_field_reference (loc, rl_arg,
5208 &rl_bitsize, &rl_bitpos, &rl_mode,
5209 &rl_unsignedp, &volatilep, &rl_mask,
5210 &rl_and_mask);
5211 rr_inner = decode_field_reference (loc, rr_arg,
5212 &rr_bitsize, &rr_bitpos, &rr_mode,
5213 &rr_unsignedp, &volatilep, &rr_mask,
5214 &rr_and_mask);
5216 /* The inner operation on the lhs of each comparison must be the
5217 same if we are to be able to do anything.
5218 Then see if we have constants. If not, the same must be true for
5219 the rhs's. */
5220 if (volatilep || ll_inner == 0 || rl_inner == 0
5221 || ! operand_equal_p (ll_inner, rl_inner, 0))
5222 return 0;
5224 if (TREE_CODE (lr_arg) == INTEGER_CST
5225 && TREE_CODE (rr_arg) == INTEGER_CST)
5226 l_const = lr_arg, r_const = rr_arg;
5227 else if (lr_inner == 0 || rr_inner == 0
5228 || ! operand_equal_p (lr_inner, rr_inner, 0))
5229 return 0;
5230 else
5231 l_const = r_const = 0;
5233 /* If either comparison code is not correct for our logical operation,
5234 fail. However, we can convert a one-bit comparison against zero into
5235 the opposite comparison against that bit being set in the field. */
5237 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5238 if (lcode != wanted_code)
5240 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5242 /* Make the left operand unsigned, since we are only interested
5243 in the value of one bit. Otherwise we are doing the wrong
5244 thing below. */
5245 ll_unsignedp = 1;
5246 l_const = ll_mask;
5248 else
5249 return 0;
5252 /* This is analogous to the code for l_const above. */
5253 if (rcode != wanted_code)
5255 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5257 rl_unsignedp = 1;
5258 r_const = rl_mask;
5260 else
5261 return 0;
5264 /* See if we can find a mode that contains both fields being compared on
5265 the left. If we can't, fail. Otherwise, update all constants and masks
5266 to be relative to a field of that size. */
5267 first_bit = MIN (ll_bitpos, rl_bitpos);
5268 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5269 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5270 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5271 volatilep);
5272 if (lnmode == VOIDmode)
5273 return 0;
5275 lnbitsize = GET_MODE_BITSIZE (lnmode);
5276 lnbitpos = first_bit & ~ (lnbitsize - 1);
5277 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5278 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5280 if (BYTES_BIG_ENDIAN)
5282 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5283 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5286 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5287 size_int (xll_bitpos));
5288 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5289 size_int (xrl_bitpos));
5291 if (l_const)
5293 l_const = fold_convert_loc (loc, lntype, l_const);
5294 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5295 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5296 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5297 fold_build1_loc (loc, BIT_NOT_EXPR,
5298 lntype, ll_mask))))
5300 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5302 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5305 if (r_const)
5307 r_const = fold_convert_loc (loc, lntype, r_const);
5308 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5309 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5310 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5311 fold_build1_loc (loc, BIT_NOT_EXPR,
5312 lntype, rl_mask))))
5314 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5316 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5320 /* If the right sides are not constant, do the same for it. Also,
5321 disallow this optimization if a size or signedness mismatch occurs
5322 between the left and right sides. */
5323 if (l_const == 0)
5325 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5326 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5327 /* Make sure the two fields on the right
5328 correspond to the left without being swapped. */
5329 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5330 return 0;
5332 first_bit = MIN (lr_bitpos, rr_bitpos);
5333 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5334 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5335 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5336 volatilep);
5337 if (rnmode == VOIDmode)
5338 return 0;
5340 rnbitsize = GET_MODE_BITSIZE (rnmode);
5341 rnbitpos = first_bit & ~ (rnbitsize - 1);
5342 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5343 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5345 if (BYTES_BIG_ENDIAN)
5347 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5348 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5351 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5352 rntype, lr_mask),
5353 size_int (xlr_bitpos));
5354 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5355 rntype, rr_mask),
5356 size_int (xrr_bitpos));
5358 /* Make a mask that corresponds to both fields being compared.
5359 Do this for both items being compared. If the operands are the
5360 same size and the bits being compared are in the same position
5361 then we can do this by masking both and comparing the masked
5362 results. */
5363 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5364 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5365 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5367 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5368 ll_unsignedp || rl_unsignedp);
5369 if (! all_ones_mask_p (ll_mask, lnbitsize))
5370 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5372 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5373 lr_unsignedp || rr_unsignedp);
5374 if (! all_ones_mask_p (lr_mask, rnbitsize))
5375 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5377 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5380 /* There is still another way we can do something: If both pairs of
5381 fields being compared are adjacent, we may be able to make a wider
5382 field containing them both.
5384 Note that we still must mask the lhs/rhs expressions. Furthermore,
5385 the mask must be shifted to account for the shift done by
5386 make_bit_field_ref. */
5387 if ((ll_bitsize + ll_bitpos == rl_bitpos
5388 && lr_bitsize + lr_bitpos == rr_bitpos)
5389 || (ll_bitpos == rl_bitpos + rl_bitsize
5390 && lr_bitpos == rr_bitpos + rr_bitsize))
5392 tree type;
5394 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5395 ll_bitsize + rl_bitsize,
5396 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5397 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5398 lr_bitsize + rr_bitsize,
5399 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5401 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5402 size_int (MIN (xll_bitpos, xrl_bitpos)));
5403 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5404 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5406 /* Convert to the smaller type before masking out unwanted bits. */
5407 type = lntype;
5408 if (lntype != rntype)
5410 if (lnbitsize > rnbitsize)
5412 lhs = fold_convert_loc (loc, rntype, lhs);
5413 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5414 type = rntype;
5416 else if (lnbitsize < rnbitsize)
5418 rhs = fold_convert_loc (loc, lntype, rhs);
5419 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5420 type = lntype;
5424 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5425 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5427 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5428 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5430 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5433 return 0;
5436 /* Handle the case of comparisons with constants. If there is something in
5437 common between the masks, those bits of the constants must be the same.
5438 If not, the condition is always false. Test for this to avoid generating
5439 incorrect code below. */
5440 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5441 if (! integer_zerop (result)
5442 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5443 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5445 if (wanted_code == NE_EXPR)
5447 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5448 return constant_boolean_node (true, truth_type);
5450 else
5452 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5453 return constant_boolean_node (false, truth_type);
5457 /* Construct the expression we will return. First get the component
5458 reference we will make. Unless the mask is all ones for the width of
5459 that field, perform the mask operation. Then compare with the
5460 merged constant. */
5461 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5462 ll_unsignedp || rl_unsignedp);
5464 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5465 if (! all_ones_mask_p (ll_mask, lnbitsize))
5466 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5468 return build2_loc (loc, wanted_code, truth_type, result,
5469 const_binop (BIT_IOR_EXPR, l_const, r_const));
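/* Illustrative sketch, not part of the original source: given

     struct s { unsigned a : 4; unsigned b : 4; } x;

   the merge above turns "x.a == 1 && x.b == 2" into a single masked
   comparison, roughly "(word & 0xff) == 0x21" on a typical
   little-endian target, where "word" is one bit-field load covering
   both fields (the AND is dropped when the mask is all ones).  */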
5472 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5473 constant. */
5475 static tree
5476 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5477 tree op0, tree op1)
5479 tree arg0 = op0;
5480 enum tree_code op_code;
5481 tree comp_const;
5482 tree minmax_const;
5483 int consts_equal, consts_lt;
5484 tree inner;
5486 STRIP_SIGN_NOPS (arg0);
5488 op_code = TREE_CODE (arg0);
5489 minmax_const = TREE_OPERAND (arg0, 1);
5490 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5491 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5492 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5493 inner = TREE_OPERAND (arg0, 0);
5495 /* If something does not permit us to optimize, return NULL_TREE. */
5496 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5497 || TREE_CODE (comp_const) != INTEGER_CST
5498 || TREE_OVERFLOW (comp_const)
5499 || TREE_CODE (minmax_const) != INTEGER_CST
5500 || TREE_OVERFLOW (minmax_const))
5501 return NULL_TREE;
5503 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5504 and GT_EXPR, doing the rest with recursive calls using logical
5505 simplifications. */
5506 switch (code)
5508 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5510 tree tem
5511 = optimize_minmax_comparison (loc,
5512 invert_tree_comparison (code, false),
5513 type, op0, op1);
5514 if (tem)
5515 return invert_truthvalue_loc (loc, tem);
5516 return NULL_TREE;
5519 case GE_EXPR:
5520 return
5521 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5522 optimize_minmax_comparison
5523 (loc, EQ_EXPR, type, arg0, comp_const),
5524 optimize_minmax_comparison
5525 (loc, GT_EXPR, type, arg0, comp_const));
5527 case EQ_EXPR:
5528 if (op_code == MAX_EXPR && consts_equal)
5529 /* MAX (X, 0) == 0 -> X <= 0 */
5530 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5532 else if (op_code == MAX_EXPR && consts_lt)
5533 /* MAX (X, 0) == 5 -> X == 5 */
5534 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5536 else if (op_code == MAX_EXPR)
5537 /* MAX (X, 0) == -1 -> false */
5538 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5540 else if (consts_equal)
5541 /* MIN (X, 0) == 0 -> X >= 0 */
5542 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5544 else if (consts_lt)
5545 /* MIN (X, 0) == 5 -> false */
5546 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5548 else
5549 /* MIN (X, 0) == -1 -> X == -1 */
5550 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5552 case GT_EXPR:
5553 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5554 /* MAX (X, 0) > 0 -> X > 0
5555 MAX (X, 0) > 5 -> X > 5 */
5556 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5558 else if (op_code == MAX_EXPR)
5559 /* MAX (X, 0) > -1 -> true */
5560 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5562 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5563 /* MIN (X, 0) > 0 -> false
5564 MIN (X, 0) > 5 -> false */
5565 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5567 else
5568 /* MIN (X, 0) > -1 -> X > -1 */
5569 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5571 default:
5572 return NULL_TREE;
5576 /* T is an integer expression that is being multiplied, divided, or taken a
5577 modulus (CODE says which and what kind of divide or modulus) by a
5578 constant C. See if we can eliminate that operation by folding it with
5579 other operations already in T. WIDE_TYPE, if non-null, is a type that
5580 should be used for the computation if wider than our type.
5582 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5583 (X * 2) + (Y * 4). We must, however, be assured that either the original
5584 expression would not overflow or that overflow is undefined for the type
5585 in the language in question.
5587 If we return a non-null expression, it is an equivalent form of the
5588 original computation, but need not be in the original type.
5590 We set *STRICT_OVERFLOW_P to true if the return value depends on
5591 signed overflow being undefined. Otherwise we do not change
5592 *STRICT_OVERFLOW_P. */
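/* Illustrative example of the overflow requirement above, not in the
   original source: with a wrapping 8-bit unsigned X == 32,
   (X * 8) / 4 evaluates to (256 & 0xff) / 4 == 0, while the folded
   form X * 2 gives 64; the fold is therefore only valid when the
   original expression cannot overflow or overflow is undefined.  */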
5594 static tree
5595 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5596 bool *strict_overflow_p)
5598 /* To avoid exponential search depth, refuse to allow recursion past
5599 three levels. Beyond that (1) it's highly unlikely that we'll find
5600 something interesting and (2) we've probably processed it before
5601 when we built the inner expression. */
5603 static int depth;
5604 tree ret;
5606 if (depth > 3)
5607 return NULL;
5609 depth++;
5610 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5611 depth--;
5613 return ret;
5616 static tree
5617 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5618 bool *strict_overflow_p)
5620 tree type = TREE_TYPE (t);
5621 enum tree_code tcode = TREE_CODE (t);
5622 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5623 > GET_MODE_SIZE (TYPE_MODE (type)))
5624 ? wide_type : type);
5625 tree t1, t2;
5626 int same_p = tcode == code;
5627 tree op0 = NULL_TREE, op1 = NULL_TREE;
5628 bool sub_strict_overflow_p;
5630 /* Don't deal with constants of zero here; they confuse the code below. */
5631 if (integer_zerop (c))
5632 return NULL_TREE;
5634 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5635 op0 = TREE_OPERAND (t, 0);
5637 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5638 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5640 /* Note that we need not handle conditional operations here since fold
5641 already handles those cases. So just do arithmetic here. */
5642 switch (tcode)
5644 case INTEGER_CST:
5645 /* For a constant, we can always simplify if we are a multiply
5646 or (for divide and modulus) if it is a multiple of our constant. */
5647 if (code == MULT_EXPR
5648 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5649 return const_binop (code, fold_convert (ctype, t),
5650 fold_convert (ctype, c));
5651 break;
5653 CASE_CONVERT: case NON_LVALUE_EXPR:
5654 /* If op0 is an expression ... */
5655 if ((COMPARISON_CLASS_P (op0)
5656 || UNARY_CLASS_P (op0)
5657 || BINARY_CLASS_P (op0)
5658 || VL_EXP_CLASS_P (op0)
5659 || EXPRESSION_CLASS_P (op0))
5660 /* ... and has wrapping overflow, and its type is smaller
5661 than ctype, then we cannot pass through as widening. */
5662 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5663 && (TYPE_PRECISION (ctype)
5664 > TYPE_PRECISION (TREE_TYPE (op0))))
5665 /* ... or this is a truncation (t is narrower than op0),
5666 then we cannot pass through this narrowing. */
5667 || (TYPE_PRECISION (type)
5668 < TYPE_PRECISION (TREE_TYPE (op0)))
5669 /* ... or signedness changes for division or modulus,
5670 then we cannot pass through this conversion. */
5671 || (code != MULT_EXPR
5672 && (TYPE_UNSIGNED (ctype)
5673 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5674 /* ... or has undefined overflow while the type we convert to
5675 does not, we cannot do the operation in the inner type
5676 as that would introduce undefined overflow. */
5677 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5678 && !TYPE_OVERFLOW_UNDEFINED (type))))
5679 break;
5681 /* Pass the constant down and see if we can make a simplification. If
5682 we can, replace this expression with the inner simplification for
5683 possible later conversion to our or some other type. */
5684 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5685 && TREE_CODE (t2) == INTEGER_CST
5686 && !TREE_OVERFLOW (t2)
5687 && (0 != (t1 = extract_muldiv (op0, t2, code,
5688 code == MULT_EXPR
5689 ? ctype : NULL_TREE,
5690 strict_overflow_p))))
5691 return t1;
5692 break;
5694 case ABS_EXPR:
5695 /* If widening the type changes it from signed to unsigned, then we
5696 must avoid building ABS_EXPR itself as unsigned. */
5697 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5699 tree cstype = (*signed_type_for) (ctype);
5700 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5701 != 0)
5703 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5704 return fold_convert (ctype, t1);
5706 break;
5708 /* If the constant is negative, we cannot simplify this. */
5709 if (tree_int_cst_sgn (c) == -1)
5710 break;
5711 /* FALLTHROUGH */
5712 case NEGATE_EXPR:
5713 /* For division and modulus, type can't be unsigned, as e.g.
5714 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5715 For signed types, even with wrapping overflow, this is fine. */
5716 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5717 break;
5718 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5719 != 0)
5720 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5721 break;
5723 case MIN_EXPR: case MAX_EXPR:
5724 /* If widening the type changes the signedness, then we can't perform
5725 this optimization as that changes the result. */
5726 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5727 break;
5729 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5730 sub_strict_overflow_p = false;
5731 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5732 &sub_strict_overflow_p)) != 0
5733 && (t2 = extract_muldiv (op1, c, code, wide_type,
5734 &sub_strict_overflow_p)) != 0)
5736 if (tree_int_cst_sgn (c) < 0)
5737 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5738 if (sub_strict_overflow_p)
5739 *strict_overflow_p = true;
5740 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5741 fold_convert (ctype, t2));
5743 break;
5745 case LSHIFT_EXPR: case RSHIFT_EXPR:
5746 /* If the second operand is constant, this is a multiplication
5747 or floor division by a power of two, so we can treat it that
5748 way unless the multiplier or divisor overflows. Signed
5749 left-shift overflow is implementation-defined rather than
5750 undefined in C90, so do not convert signed left shift into
5751 multiplication. */
5752 if (TREE_CODE (op1) == INTEGER_CST
5753 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5754 /* const_binop may not detect overflow correctly,
5755 so check for it explicitly here. */
5756 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5757 && TREE_INT_CST_HIGH (op1) == 0
5758 && 0 != (t1 = fold_convert (ctype,
5759 const_binop (LSHIFT_EXPR,
5760 size_one_node,
5761 op1)))
5762 && !TREE_OVERFLOW (t1))
5763 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5764 ? MULT_EXPR : FLOOR_DIV_EXPR,
5765 ctype,
5766 fold_convert (ctype, op0),
5767 t1),
5768 c, code, wide_type, strict_overflow_p);
5769 break;
5771 case PLUS_EXPR: case MINUS_EXPR:
5772 /* See if we can eliminate the operation on both sides. If we can, we
5773 can return a new PLUS or MINUS. If we can't, the only remaining
5774 case where we can do anything is when the second operand is a
5775 constant. */
5776 sub_strict_overflow_p = false;
5777 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5778 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5779 if (t1 != 0 && t2 != 0
5780 && (code == MULT_EXPR
5781 /* If not multiplication, we can only do this if both operands
5782 are divisible by c. */
5783 || (multiple_of_p (ctype, op0, c)
5784 && multiple_of_p (ctype, op1, c))))
5786 if (sub_strict_overflow_p)
5787 *strict_overflow_p = true;
5788 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5789 fold_convert (ctype, t2));
5792 /* If this was a subtraction, negate OP1 and set it to be an addition.
5793 This simplifies the logic below. */
5794 if (tcode == MINUS_EXPR)
5796 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5797 /* If OP1 was not easily negatable, the constant may be OP0. */
5798 if (TREE_CODE (op0) == INTEGER_CST)
5800 tree tem = op0;
5801 op0 = op1;
5802 op1 = tem;
5803 tem = t1;
5804 t1 = t2;
5805 t2 = tem;
5809 if (TREE_CODE (op1) != INTEGER_CST)
5810 break;
5812 /* If either OP1 or C is negative, this optimization is not safe for
5813 some of the division and remainder types, while for others we need
5814 to change the code. */
5815 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5817 if (code == CEIL_DIV_EXPR)
5818 code = FLOOR_DIV_EXPR;
5819 else if (code == FLOOR_DIV_EXPR)
5820 code = CEIL_DIV_EXPR;
5821 else if (code != MULT_EXPR
5822 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5823 break;
5826 /* If it's a multiply or a division/modulus operation of a multiple
5827 of our constant, do the operation and verify it doesn't overflow. */
5828 if (code == MULT_EXPR
5829 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5831 op1 = const_binop (code, fold_convert (ctype, op1),
5832 fold_convert (ctype, c));
5833 /* We allow the constant to overflow with wrapping semantics. */
5834 if (op1 == 0
5835 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5836 break;
5838 else
5839 break;
5841 /* If we have an unsigned type, we cannot widen the operation since it
5842 will change the result if the original computation overflowed. */
5843 if (TYPE_UNSIGNED (ctype) && ctype != type)
5844 break;
5846 /* If we were able to eliminate our operation from the first side,
5847 apply our operation to the second side and reform the PLUS. */
5848 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5851 /* The last case is if we are a multiply. In that case, we can
5852 apply the distributive law to commute the multiply and addition
5853 if the multiplication of the constants doesn't overflow. */
5854 if (code == MULT_EXPR)
5855 return fold_build2 (tcode, ctype,
5856 fold_build2 (code, ctype,
5857 fold_convert (ctype, op0),
5858 fold_convert (ctype, c)),
5859 op1);
5861 break;
5863 case MULT_EXPR:
5864 /* We have a special case here if we are doing something like
5865 (C * 8) % 4 since we know that's zero. */
5866 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5867 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5868 /* If the multiplication can overflow we cannot optimize this. */
5869 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5870 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5871 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5873 *strict_overflow_p = true;
5874 return omit_one_operand (type, integer_zero_node, op0);
5877 /* ... fall through ... */
5879 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5880 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5881 /* If we can extract our operation from the LHS, do so and return a
5882 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5883 do something only if the second operand is a constant. */
5884 if (same_p
5885 && (t1 = extract_muldiv (op0, c, code, wide_type,
5886 strict_overflow_p)) != 0)
5887 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5888 fold_convert (ctype, op1));
5889 else if (tcode == MULT_EXPR && code == MULT_EXPR
5890 && (t1 = extract_muldiv (op1, c, code, wide_type,
5891 strict_overflow_p)) != 0)
5892 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5893 fold_convert (ctype, t1));
5894 else if (TREE_CODE (op1) != INTEGER_CST)
5895 return 0;
5897 /* If these are the same operation types, we can associate them
5898 assuming no overflow. */
5899 if (tcode == code)
5901 double_int mul;
5902 bool overflow_p;
5903 unsigned prec = TYPE_PRECISION (ctype);
5904 bool uns = TYPE_UNSIGNED (ctype);
5905 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5906 double_int dic = tree_to_double_int (c).ext (prec, uns);
5907 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5908 overflow_p = ((!uns && overflow_p)
5909 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5910 if (!double_int_fits_to_tree_p (ctype, mul)
5911 && ((uns && tcode != MULT_EXPR) || !uns))
5912 overflow_p = 1;
5913 if (!overflow_p)
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5915 double_int_to_tree (ctype, mul));
5918 /* If these operations "cancel" each other, we have the main
5919 optimizations of this pass, which occur when either constant is a
5920 multiple of the other, in which case we replace this with an
5921 operation of either CODE or TCODE.
5923 If we have an unsigned type, we cannot do this since it will change
5924 the result if the original computation overflowed. */
5925 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5926 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5927 || (tcode == MULT_EXPR
5928 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5929 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5930 && code != MULT_EXPR)))
5932 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5934 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5935 *strict_overflow_p = true;
5936 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5937 fold_convert (ctype,
5938 const_binop (TRUNC_DIV_EXPR,
5939 op1, c)));
5941 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5943 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5944 *strict_overflow_p = true;
5945 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5946 fold_convert (ctype,
5947 const_binop (TRUNC_DIV_EXPR,
5948 c, op1)));
5951 break;
5953 default:
5954 break;
5957 return 0;
5960 /* Return a node which has the indicated constant VALUE (either 0 or
5961 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5962 and is of the indicated TYPE. */
5964 tree
5965 constant_boolean_node (bool value, tree type)
5967 if (type == integer_type_node)
5968 return value ? integer_one_node : integer_zero_node;
5969 else if (type == boolean_type_node)
5970 return value ? boolean_true_node : boolean_false_node;
5971 else if (TREE_CODE (type) == VECTOR_TYPE)
5972 return build_vector_from_val (type,
5973 build_int_cst (TREE_TYPE (type),
5974 value ? -1 : 0));
5975 else
5976 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5980 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5981 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5982 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5983 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5984 COND is the first argument to CODE; otherwise (as in the example
5985 given here), it is the second argument. TYPE is the type of the
5986 original expression. Return NULL_TREE if no simplification is
5987 possible. */
5989 static tree
5990 fold_binary_op_with_conditional_arg (location_t loc,
5991 enum tree_code code,
5992 tree type, tree op0, tree op1,
5993 tree cond, tree arg, int cond_first_p)
5995 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5996 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5997 tree test, true_value, false_value;
5998 tree lhs = NULL_TREE;
5999 tree rhs = NULL_TREE;
6000 enum tree_code cond_code = COND_EXPR;
6002 if (TREE_CODE (cond) == COND_EXPR
6003 || TREE_CODE (cond) == VEC_COND_EXPR)
6005 test = TREE_OPERAND (cond, 0);
6006 true_value = TREE_OPERAND (cond, 1);
6007 false_value = TREE_OPERAND (cond, 2);
6008 /* If this operand throws an exception, then it does not make
6009 sense to try to perform a logical or arithmetic operation
6010 involving it. */
6011 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6012 lhs = true_value;
6013 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6014 rhs = false_value;
6016 else
6018 tree testtype = TREE_TYPE (cond);
6019 test = cond;
6020 true_value = constant_boolean_node (true, testtype);
6021 false_value = constant_boolean_node (false, testtype);
6024 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6025 cond_code = VEC_COND_EXPR;
6027 /* This transformation is only worthwhile if we don't have to wrap ARG
6028 in a SAVE_EXPR and the operation can be simplified without recursing
6029 on at least one of the branches once it is pushed inside the COND_EXPR. */
6030 if (!TREE_CONSTANT (arg)
6031 && (TREE_SIDE_EFFECTS (arg)
6032 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6033 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6034 return NULL_TREE;
6036 arg = fold_convert_loc (loc, arg_type, arg);
6037 if (lhs == 0)
6039 true_value = fold_convert_loc (loc, cond_type, true_value);
6040 if (cond_first_p)
6041 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6042 else
6043 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6045 if (rhs == 0)
6047 false_value = fold_convert_loc (loc, cond_type, false_value);
6048 if (cond_first_p)
6049 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6050 else
6051 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6054 /* Check that we have simplified at least one of the branches. */
6055 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6056 return NULL_TREE;
6058 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6062 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6064 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6065 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6066 ADDEND is the same as X.
6068 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6069 and finite. The problematic cases are when X is zero, and its mode
6070 has signed zeros. In the case of rounding towards -infinity,
6071 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6072 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6074 bool
6075 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6077 if (!real_zerop (addend))
6078 return false;
6080 /* Don't allow the fold with -fsignaling-nans. */
6081 if (HONOR_SNANS (TYPE_MODE (type)))
6082 return false;
6084 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6085 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6086 return true;
6088 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6089 if (TREE_CODE (addend) == REAL_CST
6090 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6091 negate = !negate;
6093 /* The mode has signed zeros, and we have to honor their sign.
6094 In this situation, there is only one case we can return true for.
6095 X - 0 is the same as X unless rounding towards -infinity is
6096 supported. */
6097 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
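/* A minimal host-side sketch of the two problematic cases above,
   assuming a C99 <fenv.h> environment and that the compiler does not
   constant-fold the expressions; illustrative only, not part of the
   original source.  */
#if 0
#include <fenv.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, mz = -0.0;

  printf ("%g\n", mz + 0.0);	/* Prints 0: x + 0 loses the sign of -0.  */

  fesetround (FE_DOWNWARD);
  printf ("%g\n", pz - 0.0);	/* Prints -0: 0 - 0 rounds to -0 downward.  */
  return 0;
}
#endif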
6100 /* Subroutine of fold() that checks comparisons of built-in math
6101 functions against real constants.
6103 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6104 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6105 is the type of the result and ARG0 and ARG1 are the operands of the
6106 comparison. ARG1 must be a TREE_REAL_CST.
6108 The function returns the constant folded tree if a simplification
6109 can be made, and NULL_TREE otherwise. */
6111 static tree
6112 fold_mathfn_compare (location_t loc,
6113 enum built_in_function fcode, enum tree_code code,
6114 tree type, tree arg0, tree arg1)
6116 REAL_VALUE_TYPE c;
6118 if (BUILTIN_SQRT_P (fcode))
6120 tree arg = CALL_EXPR_ARG (arg0, 0);
6121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6123 c = TREE_REAL_CST (arg1);
6124 if (REAL_VALUE_NEGATIVE (c))
6126 /* sqrt(x) < y is always false, if y is negative. */
6127 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6128 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6130 /* sqrt(x) > y is always true, if y is negative and we
6131 don't care about NaNs, i.e. negative values of x. */
6132 if (code == NE_EXPR || !HONOR_NANS (mode))
6133 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6135 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6136 return fold_build2_loc (loc, GE_EXPR, type, arg,
6137 build_real (TREE_TYPE (arg), dconst0));
6139 else if (code == GT_EXPR || code == GE_EXPR)
6141 REAL_VALUE_TYPE c2;
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 real_convert (&c2, mode, &c2);
6146 if (REAL_VALUE_ISINF (c2))
6148 /* sqrt(x) > y is x == +Inf, when y is very large. */
6149 if (HONOR_INFINITIES (mode))
6150 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg), c2));
6153 /* sqrt(x) > y is always false, when y is very large
6154 and we don't care about infinities. */
6155 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6158 /* sqrt(x) > c is the same as x > c*c. */
6159 return fold_build2_loc (loc, code, type, arg,
6160 build_real (TREE_TYPE (arg), c2));
6162 else if (code == LT_EXPR || code == LE_EXPR)
6164 REAL_VALUE_TYPE c2;
6166 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6167 real_convert (&c2, mode, &c2);
6169 if (REAL_VALUE_ISINF (c2))
6171 /* sqrt(x) < y is always true, when y is a very large
6172 value and we don't care about NaNs or Infinities. */
6173 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6174 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6176 /* sqrt(x) < y is x != +Inf when y is very large and we
6177 don't care about NaNs. */
6178 if (! HONOR_NANS (mode))
6179 return fold_build2_loc (loc, NE_EXPR, type, arg,
6180 build_real (TREE_TYPE (arg), c2));
6182 /* sqrt(x) < y is x >= 0 when y is very large and we
6183 don't care about Infinities. */
6184 if (! HONOR_INFINITIES (mode))
6185 return fold_build2_loc (loc, GE_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg), dconst0));
6188 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6189 arg = save_expr (arg);
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 dconst0)),
6194 fold_build2_loc (loc, NE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 c2)));
6199 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6200 if (! HONOR_NANS (mode))
6201 return fold_build2_loc (loc, code, type, arg,
6202 build_real (TREE_TYPE (arg), c2));
6204 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6205 arg = save_expr (arg);
6206 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6207 fold_build2_loc (loc, GE_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg),
6209 dconst0)),
6210 fold_build2_loc (loc, code, type, arg,
6211 build_real (TREE_TYPE (arg),
6212 c2)));
6216 return NULL_TREE;
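/* A minimal sketch of the equivalence the sqrt folds above rely on,
   assuming c >= 0 and x neither NaN nor infinite: sqrt is monotonic
   on [0, +Inf), so "sqrt (x) > c" agrees with "x > c2" where c2 is
   c * c rounded to the comparison type (computed via real_convert
   above).  Illustrative only, not part of the original source.  */
#if 0
#include <math.h>

static int
sqrt_gt_folded (double x, double c)
{
  double c2 = c * c;
  return x > c2;		/* Same truth value as sqrt (x) > c.  */
}
#endif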
6219 /* Subroutine of fold() that optimizes comparisons against Infinities,
6220 either +Inf or -Inf.
6222 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6223 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6224 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6226 The function returns the constant folded tree if a simplification
6227 can be made, and NULL_TREE otherwise. */
6229 static tree
6230 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6231 tree arg0, tree arg1)
6233 enum machine_mode mode;
6234 REAL_VALUE_TYPE max;
6235 tree temp;
6236 bool neg;
6238 mode = TYPE_MODE (TREE_TYPE (arg0));
6240 /* For negative infinity swap the sense of the comparison. */
6241 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6242 if (neg)
6243 code = swap_tree_comparison (code);
6245 switch (code)
6247 case GT_EXPR:
6248 /* x > +Inf is always false, if we ignore sNaNs. */
6249 if (HONOR_SNANS (mode))
6250 return NULL_TREE;
6251 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6253 case LE_EXPR:
6254 /* x <= +Inf is always true, if we don't care about NaNs. */
6255 if (! HONOR_NANS (mode))
6256 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6258 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6259 arg0 = save_expr (arg0);
6260 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6262 case EQ_EXPR:
6263 case GE_EXPR:
6264 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6265 real_maxval (&max, neg, mode);
6266 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6269 case LT_EXPR:
6270 /* x < +Inf is always equal to x <= DBL_MAX. */
6271 real_maxval (&max, neg, mode);
6272 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6273 arg0, build_real (TREE_TYPE (arg0), max));
6275 case NE_EXPR:
6276 /* x != +Inf is always equal to !(x > DBL_MAX). */
6277 real_maxval (&max, neg, mode);
6278 if (! HONOR_NANS (mode))
6279 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6280 arg0, build_real (TREE_TYPE (arg0), max));
6282 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6283 arg0, build_real (TREE_TYPE (arg0), max));
6284 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6286 default:
6287 break;
6290 return NULL_TREE;
6293 /* Subroutine of fold() that optimizes comparisons of a division by
6294 a nonzero integer constant against an integer constant, i.e.
6295 X/C1 op C2.
6297 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6298 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6299 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6301 The function returns the constant folded tree if a simplification
6302 can be made, and NULL_TREE otherwise. */
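/* Worked example: for "X / 3 == 2" with signed X, prod == 6 and
   tmp == 2, giving [lo, hi] == [6, 8]; truncating division maps
   exactly that interval to 2, so the comparison becomes the range
   check 6 <= X && X <= 8.  */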
6304 static tree
6305 fold_div_compare (location_t loc,
6306 enum tree_code code, tree type, tree arg0, tree arg1)
6308 tree prod, tmp, hi, lo;
6309 tree arg00 = TREE_OPERAND (arg0, 0);
6310 tree arg01 = TREE_OPERAND (arg0, 1);
6311 double_int val;
6312 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6313 bool neg_overflow;
6314 bool overflow;
6316 /* We have to do this the hard way to detect unsigned overflow.
6317 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6318 val = TREE_INT_CST (arg01)
6319 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6320 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6321 neg_overflow = false;
6323 if (unsigned_p)
6325 tmp = int_const_binop (MINUS_EXPR, arg01,
6326 build_int_cst (TREE_TYPE (arg01), 1));
6327 lo = prod;
6329 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6330 val = TREE_INT_CST (prod)
6331 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6332 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6333 -1, overflow | TREE_OVERFLOW (prod));
6335 else if (tree_int_cst_sgn (arg01) >= 0)
6337 tmp = int_const_binop (MINUS_EXPR, arg01,
6338 build_int_cst (TREE_TYPE (arg01), 1));
6339 switch (tree_int_cst_sgn (arg1))
6341 case -1:
6342 neg_overflow = true;
6343 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6344 hi = prod;
6345 break;
6347 case 0:
6348 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6349 hi = tmp;
6350 break;
6352 case 1:
6353 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6354 lo = prod;
6355 break;
6357 default:
6358 gcc_unreachable ();
6361 else
6363 /* A negative divisor reverses the relational operators. */
6364 code = swap_tree_comparison (code);
6366 tmp = int_const_binop (PLUS_EXPR, arg01,
6367 build_int_cst (TREE_TYPE (arg01), 1));
6368 switch (tree_int_cst_sgn (arg1))
6370 case -1:
6371 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6372 lo = prod;
6373 break;
6375 case 0:
6376 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6377 lo = tmp;
6378 break;
6380 case 1:
6381 neg_overflow = true;
6382 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6383 hi = prod;
6384 break;
6386 default:
6387 gcc_unreachable ();
6391 switch (code)
6393 case EQ_EXPR:
6394 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6395 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6396 if (TREE_OVERFLOW (hi))
6397 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6398 if (TREE_OVERFLOW (lo))
6399 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6400 return build_range_check (loc, type, arg00, 1, lo, hi);
6402 case NE_EXPR:
6403 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6404 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6405 if (TREE_OVERFLOW (hi))
6406 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6407 if (TREE_OVERFLOW (lo))
6408 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6409 return build_range_check (loc, type, arg00, 0, lo, hi);
6411 case LT_EXPR:
6412 if (TREE_OVERFLOW (lo))
6414 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6415 return omit_one_operand_loc (loc, type, tmp, arg00);
6417 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6419 case LE_EXPR:
6420 if (TREE_OVERFLOW (hi))
6422 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6423 return omit_one_operand_loc (loc, type, tmp, arg00);
6425 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6427 case GT_EXPR:
6428 if (TREE_OVERFLOW (hi))
6430 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6431 return omit_one_operand_loc (loc, type, tmp, arg00);
6433 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6435 case GE_EXPR:
6436 if (TREE_OVERFLOW (lo))
6438 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6439 return omit_one_operand_loc (loc, type, tmp, arg00);
6441 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6443 default:
6444 break;
6447 return NULL_TREE;
6451 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6452 equality/inequality test, then return a simplified form of the test
6453 using a sign test. Otherwise return NULL. TYPE is the desired
6454 result type. */
6456 static tree
6457 fold_single_bit_test_into_sign_test (location_t loc,
6458 enum tree_code code, tree arg0, tree arg1,
6459 tree result_type)
6461 /* If this is testing a single bit, we can optimize the test. */
6462 if ((code == NE_EXPR || code == EQ_EXPR)
6463 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6464 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6466 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6467 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6468 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6470 if (arg00 != NULL_TREE
6471 /* This is only a win if casting to a signed type is cheap,
6472 i.e. when arg00's type is not a partial mode. */
6473 && TYPE_PRECISION (TREE_TYPE (arg00))
6474 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6476 tree stype = signed_type_for (TREE_TYPE (arg00));
6477 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6478 result_type,
6479 fold_convert_loc (loc, stype, arg00),
6480 build_int_cst (stype, 0));
6484 return NULL_TREE;
6487 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6488 equality/inequality test, then return a simplified form of
6489 the test using shifts and logical operations. Otherwise return
6490 NULL. TYPE is the desired result type. */
6492 tree
6493 fold_single_bit_test (location_t loc, enum tree_code code,
6494 tree arg0, tree arg1, tree result_type)
6496 /* If this is testing a single bit, we can optimize the test. */
6497 if ((code == NE_EXPR || code == EQ_EXPR)
6498 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6499 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6501 tree inner = TREE_OPERAND (arg0, 0);
6502 tree type = TREE_TYPE (arg0);
6503 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6504 enum machine_mode operand_mode = TYPE_MODE (type);
6505 int ops_unsigned;
6506 tree signed_type, unsigned_type, intermediate_type;
6507 tree tem, one;
6509 /* First, see if we can fold the single bit test into a sign-bit
6510 test. */
6511 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6512 result_type);
6513 if (tem)
6514 return tem;
6516 /* Otherwise we have (A & C) != 0 where C is a single bit,
6517 convert that into ((A >> C2) & 1), where C2 = log2(C).
6518 Similarly for (A & C) == 0. */
6520 /* If INNER is a right shift of a constant and it plus BITNUM does
6521 not overflow, adjust BITNUM and INNER. */
6522 if (TREE_CODE (inner) == RSHIFT_EXPR
6523 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6524 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6525 && bitnum < TYPE_PRECISION (type)
6526 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6527 bitnum - TYPE_PRECISION (type)))
6529 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6530 inner = TREE_OPERAND (inner, 0);
6533 /* If we are going to be able to omit the AND below, we must do our
6534 operations as unsigned. If we must use the AND, we have a choice.
6535 Normally unsigned is faster, but for some machines signed is. */
6536 #ifdef LOAD_EXTEND_OP
6537 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6538 && !flag_syntax_only) ? 0 : 1;
6539 #else
6540 ops_unsigned = 1;
6541 #endif
6543 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6544 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6545 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6546 inner = fold_convert_loc (loc, intermediate_type, inner);
6548 if (bitnum != 0)
6549 inner = build2 (RSHIFT_EXPR, intermediate_type,
6550 inner, size_int (bitnum));
6552 one = build_int_cst (intermediate_type, 1);
6554 if (code == EQ_EXPR)
6555 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6557 /* Put the AND last so it can combine with more things. */
6558 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6560 /* Make sure to return the proper type. */
6561 inner = fold_convert_loc (loc, result_type, inner);
6563 return inner;
6565 return NULL_TREE;
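/* A minimal sketch of the rewrite above on plain unsigned ints, with
   C == 1 << bitnum; illustrative only, not part of the original
   source.  */
#if 0
static unsigned int
single_bit_ne (unsigned int a, int bitnum)
{
  return (a >> bitnum) & 1;	   /* (a & (1u << bitnum)) != 0  */
}

static unsigned int
single_bit_eq (unsigned int a, int bitnum)
{
  /* The EQ_EXPR case XORs in 1 first; the AND is applied last so it
     can combine with surrounding code, as noted above.  */
  return ((a >> bitnum) ^ 1) & 1;  /* (a & (1u << bitnum)) == 0  */
}
#endif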
6568 /* Check whether we are allowed to reorder operands arg0 and arg1,
6569 such that the evaluation of arg1 occurs before arg0. */
6571 static bool
6572 reorder_operands_p (const_tree arg0, const_tree arg1)
6574 if (! flag_evaluation_order)
6575 return true;
6576 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6577 return true;
6578 return ! TREE_SIDE_EFFECTS (arg0)
6579 && ! TREE_SIDE_EFFECTS (arg1);
6582 /* Test whether it is preferable to swap two operands, ARG0 and
6583 ARG1, for example because ARG0 is an integer constant and ARG1
6584 isn't. If REORDER is true, only recommend swapping if we can
6585 evaluate the operands in reverse order. */
6587 bool
6588 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6590 STRIP_SIGN_NOPS (arg0);
6591 STRIP_SIGN_NOPS (arg1);
6593 if (TREE_CODE (arg1) == INTEGER_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == INTEGER_CST)
6596 return 1;
6598 if (TREE_CODE (arg1) == REAL_CST)
6599 return 0;
6600 if (TREE_CODE (arg0) == REAL_CST)
6601 return 1;
6603 if (TREE_CODE (arg1) == FIXED_CST)
6604 return 0;
6605 if (TREE_CODE (arg0) == FIXED_CST)
6606 return 1;
6608 if (TREE_CODE (arg1) == COMPLEX_CST)
6609 return 0;
6610 if (TREE_CODE (arg0) == COMPLEX_CST)
6611 return 1;
6613 if (TREE_CONSTANT (arg1))
6614 return 0;
6615 if (TREE_CONSTANT (arg0))
6616 return 1;
6618 if (optimize_function_for_size_p (cfun))
6619 return 0;
6621 if (reorder && flag_evaluation_order
6622 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6623 return 0;
6625 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6626 for commutative and comparison operators. Ensuring a canonical
6627 form allows the optimizers to find additional redundancies without
6628 having to explicitly check for both orderings. */
6629 if (TREE_CODE (arg0) == SSA_NAME
6630 && TREE_CODE (arg1) == SSA_NAME
6631 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6632 return 1;
6634 /* Put SSA_NAMEs last. */
6635 if (TREE_CODE (arg1) == SSA_NAME)
6636 return 0;
6637 if (TREE_CODE (arg0) == SSA_NAME)
6638 return 1;
6640 /* Put variables last. */
6641 if (DECL_P (arg1))
6642 return 0;
6643 if (DECL_P (arg0))
6644 return 1;
6646 return 0;
6649 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6650 ARG0 is extended to a wider type. */
6652 static tree
6653 fold_widened_comparison (location_t loc, enum tree_code code,
6654 tree type, tree arg0, tree arg1)
6656 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6657 tree arg1_unw;
6658 tree shorter_type, outer_type;
6659 tree min, max;
6660 bool above, below;
6662 if (arg0_unw == arg0)
6663 return NULL_TREE;
6664 shorter_type = TREE_TYPE (arg0_unw);
6666 #ifdef HAVE_canonicalize_funcptr_for_compare
6667 /* Disable this optimization if we're casting a function pointer
6668 type on targets that require function pointer canonicalization. */
6669 if (HAVE_canonicalize_funcptr_for_compare
6670 && TREE_CODE (shorter_type) == POINTER_TYPE
6671 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6672 return NULL_TREE;
6673 #endif
6675 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6676 return NULL_TREE;
6678 arg1_unw = get_unwidened (arg1, NULL_TREE);
6680 /* If possible, express the comparison in the shorter mode. */
6681 if ((code == EQ_EXPR || code == NE_EXPR
6682 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6683 && (TREE_TYPE (arg1_unw) == shorter_type
6684 || ((TYPE_PRECISION (shorter_type)
6685 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6686 && (TYPE_UNSIGNED (shorter_type)
6687 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6688 || (TREE_CODE (arg1_unw) == INTEGER_CST
6689 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6690 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6691 && int_fits_type_p (arg1_unw, shorter_type))))
6692 return fold_build2_loc (loc, code, type, arg0_unw,
6693 fold_convert_loc (loc, shorter_type, arg1_unw));
6695 if (TREE_CODE (arg1_unw) != INTEGER_CST
6696 || TREE_CODE (shorter_type) != INTEGER_TYPE
6697 || !int_fits_type_p (arg1_unw, shorter_type))
6698 return NULL_TREE;
6700 /* If we are comparing with an integer that does not fit into the range
6701 of the shorter type, the result is known. */
6702 outer_type = TREE_TYPE (arg1_unw);
6703 min = lower_bound_in_type (outer_type, shorter_type);
6704 max = upper_bound_in_type (outer_type, shorter_type);
6706 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6707 max, arg1_unw));
6708 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6709 arg1_unw, min));
6711 switch (code)
6713 case EQ_EXPR:
6714 if (above || below)
6715 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6716 break;
6718 case NE_EXPR:
6719 if (above || below)
6720 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6721 break;
6723 case LT_EXPR:
6724 case LE_EXPR:
6725 if (above)
6726 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6727 else if (below)
6728 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6730 case GT_EXPR:
6731 case GE_EXPR:
6732 if (above)
6733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6734 else if (below)
6735 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6737 default:
6738 break;
6741 return NULL_TREE;
6744 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6745 ARG0 just the signedness is changed. */
6747 static tree
6748 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6749 tree arg0, tree arg1)
6751 tree arg0_inner;
6752 tree inner_type, outer_type;
6754 if (!CONVERT_EXPR_P (arg0))
6755 return NULL_TREE;
6757 outer_type = TREE_TYPE (arg0);
6758 arg0_inner = TREE_OPERAND (arg0, 0);
6759 inner_type = TREE_TYPE (arg0_inner);
6761 #ifdef HAVE_canonicalize_funcptr_for_compare
6762 /* Disable this optimization if we're casting a function pointer
6763 type on targets that require function pointer canonicalization. */
6764 if (HAVE_canonicalize_funcptr_for_compare
6765 && TREE_CODE (inner_type) == POINTER_TYPE
6766 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6767 return NULL_TREE;
6768 #endif
6770 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6771 return NULL_TREE;
6773 if (TREE_CODE (arg1) != INTEGER_CST
6774 && !(CONVERT_EXPR_P (arg1)
6775 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6776 return NULL_TREE;
6778 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6779 && code != NE_EXPR
6780 && code != EQ_EXPR)
6781 return NULL_TREE;
6783 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6784 return NULL_TREE;
6786 if (TREE_CODE (arg1) == INTEGER_CST)
6787 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6788 0, TREE_OVERFLOW (arg1));
6789 else
6790 arg1 = fold_convert_loc (loc, inner_type, arg1);
6792 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6795 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6796 the step of the array. Reconstructs s and delta in the case of s *
6797 delta being an integer constant (and thus already folded). ADDR is
6798 the address. OP1 is the multiplicative expression. If the
6799 function succeeds, the new address expression is returned.
6800 Otherwise NULL_TREE is returned. LOC is the location of the
6801 resulting expression. */
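/* Worked example: given "int a[10];", folding "&a[1] p+ 12" finds the
   element step s == 4, so delta == 12 / 4 == 3 and the result is
   &a[1 + 3], i.e. &a[4].  */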
6803 static tree
6804 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6806 tree s, delta, step;
6807 tree ref = TREE_OPERAND (addr, 0), pref;
6808 tree ret, pos;
6809 tree itype;
6810 bool mdim = false;
6812 /* Strip the nops that might be added when converting op1 to sizetype. */
6813 STRIP_NOPS (op1);
6815 /* Canonicalize op1 into a possibly non-constant delta
6816 and an INTEGER_CST s. */
6817 if (TREE_CODE (op1) == MULT_EXPR)
6819 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6821 STRIP_NOPS (arg0);
6822 STRIP_NOPS (arg1);
6824 if (TREE_CODE (arg0) == INTEGER_CST)
6826 s = arg0;
6827 delta = arg1;
6829 else if (TREE_CODE (arg1) == INTEGER_CST)
6831 s = arg1;
6832 delta = arg0;
6834 else
6835 return NULL_TREE;
6837 else if (TREE_CODE (op1) == INTEGER_CST)
6839 delta = op1;
6840 s = NULL_TREE;
6842 else
6844 /* Treat op1 as delta * 1. */
6845 delta = op1;
6846 s = integer_one_node;
6849 /* Handle &x.array the same as we would handle &x.array[0]. */
6850 if (TREE_CODE (ref) == COMPONENT_REF
6851 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6853 tree domain;
6855 /* Remember if this was a multi-dimensional array. */
6856 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6857 mdim = true;
6859 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6860 if (! domain)
6861 goto cont;
6862 itype = TREE_TYPE (domain);
6864 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6865 if (TREE_CODE (step) != INTEGER_CST)
6866 goto cont;
6868 if (s)
6870 if (! tree_int_cst_equal (step, s))
6871 goto cont;
6873 else
6875 /* Check whether delta is a multiple of step. */
6876 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6877 if (! tmp)
6878 goto cont;
6879 delta = tmp;
6882 /* Only fold here if we can verify we do not overflow one
6883 dimension of a multi-dimensional array. */
6884 if (mdim)
6886 tree tmp;
6888 if (!TYPE_MIN_VALUE (domain)
6889 || !TYPE_MAX_VALUE (domain)
6890 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6891 goto cont;
6893 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6894 fold_convert_loc (loc, itype,
6895 TYPE_MIN_VALUE (domain)),
6896 fold_convert_loc (loc, itype, delta));
6897 if (TREE_CODE (tmp) != INTEGER_CST
6898 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6899 goto cont;
6902 /* We found a suitable component reference. */
6904 pref = TREE_OPERAND (addr, 0);
6905 ret = copy_node (pref);
6906 SET_EXPR_LOCATION (ret, loc);
6908 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6909 fold_build2_loc
6910 (loc, PLUS_EXPR, itype,
6911 fold_convert_loc (loc, itype,
6912 TYPE_MIN_VALUE
6913 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6914 fold_convert_loc (loc, itype, delta)),
6915 NULL_TREE, NULL_TREE);
6916 return build_fold_addr_expr_loc (loc, ret);
6919 cont:
6921 for (;; ref = TREE_OPERAND (ref, 0))
6923 if (TREE_CODE (ref) == ARRAY_REF)
6925 tree domain;
6927 /* Remember if this was a multi-dimensional array. */
6928 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6929 mdim = true;
6931 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6932 if (! domain)
6933 continue;
6934 itype = TREE_TYPE (domain);
6936 step = array_ref_element_size (ref);
6937 if (TREE_CODE (step) != INTEGER_CST)
6938 continue;
6940 if (s)
6942 if (! tree_int_cst_equal (step, s))
6943 continue;
6945 else
6947 /* Check whether delta is a multiple of step. */
6948 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6949 if (! tmp)
6950 continue;
6951 delta = tmp;
6954 /* Only fold here if we can verify we do not overflow one
6955 dimension of a multi-dimensional array. */
6956 if (mdim)
6958 tree tmp;
6960 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6961 || !TYPE_MAX_VALUE (domain)
6962 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6963 continue;
6965 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6966 fold_convert_loc (loc, itype,
6967 TREE_OPERAND (ref, 1)),
6968 fold_convert_loc (loc, itype, delta));
6969 if (!tmp
6970 || TREE_CODE (tmp) != INTEGER_CST
6971 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6972 continue;
6975 break;
6977 else
6978 mdim = false;
6980 if (!handled_component_p (ref))
6981 return NULL_TREE;
6984 /* We found a suitable array reference. So copy everything up to it,
6985 and replace the index. */
6987 pref = TREE_OPERAND (addr, 0);
6988 ret = copy_node (pref);
6989 SET_EXPR_LOCATION (ret, loc);
6990 pos = ret;
6992 while (pref != ref)
6994 pref = TREE_OPERAND (pref, 0);
6995 TREE_OPERAND (pos, 0) = copy_node (pref);
6996 pos = TREE_OPERAND (pos, 0);
6999 TREE_OPERAND (pos, 1)
7000 = fold_build2_loc (loc, PLUS_EXPR, itype,
7001 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7002 fold_convert_loc (loc, itype, delta));
7003 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7007 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7008 means A >= Y && A != MAX, but in this case we know that
7009 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
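/* Reasoning sketch, not in the original source: for integer A,
   "A + 1 > Y" equals "A >= Y" except when A + 1 overflows, i.e. when
   A == MAX; the companion bound A < X <= MAX rules that case out.  */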
7011 static tree
7012 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7014 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7016 if (TREE_CODE (bound) == LT_EXPR)
7017 a = TREE_OPERAND (bound, 0);
7018 else if (TREE_CODE (bound) == GT_EXPR)
7019 a = TREE_OPERAND (bound, 1);
7020 else
7021 return NULL_TREE;
7023 typea = TREE_TYPE (a);
7024 if (!INTEGRAL_TYPE_P (typea)
7025 && !POINTER_TYPE_P (typea))
7026 return NULL_TREE;
7028 if (TREE_CODE (ineq) == LT_EXPR)
7030 a1 = TREE_OPERAND (ineq, 1);
7031 y = TREE_OPERAND (ineq, 0);
7033 else if (TREE_CODE (ineq) == GT_EXPR)
7035 a1 = TREE_OPERAND (ineq, 0);
7036 y = TREE_OPERAND (ineq, 1);
7038 else
7039 return NULL_TREE;
7041 if (TREE_TYPE (a1) != typea)
7042 return NULL_TREE;
7044 if (POINTER_TYPE_P (typea))
7046 /* Convert the pointers to integers before taking the difference. */
7047 tree ta = fold_convert_loc (loc, ssizetype, a);
7048 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7049 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7051 else
7052 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7054 if (!diff || !integer_onep (diff))
7055 return NULL_TREE;
7057 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7060 /* Fold a sum or difference of at least one multiplication.
7061 Returns the folded tree or NULL if no simplification could be made. */
7063 static tree
7064 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7065 tree arg0, tree arg1)
7067 tree arg00, arg01, arg10, arg11;
7068 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7070 /* (A * C) +- (B * C) -> (A+-B) * C.
7071 (A * C) +- A -> A * (C+-1).
7072 We are most concerned about the case where C is a constant,
7073 but other combinations show up during loop reduction. Since
7074 it is not difficult, try all four possibilities. */
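/* Worked examples of the transformations below:
     i * 4 + j * 4   ->  (i + j) * 4      (identical multiplicands)
     i * 12 + j * 4  ->  (i * 3 + j) * 4  (common power-of-two factor)  */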
7076 if (TREE_CODE (arg0) == MULT_EXPR)
7078 arg00 = TREE_OPERAND (arg0, 0);
7079 arg01 = TREE_OPERAND (arg0, 1);
7081 else if (TREE_CODE (arg0) == INTEGER_CST)
7083 arg00 = build_one_cst (type);
7084 arg01 = arg0;
7086 else
7088 /* We cannot generate constant 1 for fract. */
7089 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7090 return NULL_TREE;
7091 arg00 = arg0;
7092 arg01 = build_one_cst (type);
7094 if (TREE_CODE (arg1) == MULT_EXPR)
7096 arg10 = TREE_OPERAND (arg1, 0);
7097 arg11 = TREE_OPERAND (arg1, 1);
7099 else if (TREE_CODE (arg1) == INTEGER_CST)
7101 arg10 = build_one_cst (type);
7102 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7103 the purpose of this canonicalization. */
7104 if (TREE_INT_CST_HIGH (arg1) == -1
7105 && negate_expr_p (arg1)
7106 && code == PLUS_EXPR)
7108 arg11 = negate_expr (arg1);
7109 code = MINUS_EXPR;
7111 else
7112 arg11 = arg1;
7114 else
7116 /* We cannot generate constant 1 for fract. */
7117 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7118 return NULL_TREE;
7119 arg10 = arg1;
7120 arg11 = build_one_cst (type);
7122 same = NULL_TREE;
7124 if (operand_equal_p (arg01, arg11, 0))
7125 same = arg01, alt0 = arg00, alt1 = arg10;
7126 else if (operand_equal_p (arg00, arg10, 0))
7127 same = arg00, alt0 = arg01, alt1 = arg11;
7128 else if (operand_equal_p (arg00, arg11, 0))
7129 same = arg00, alt0 = arg01, alt1 = arg10;
7130 else if (operand_equal_p (arg01, arg10, 0))
7131 same = arg01, alt0 = arg00, alt1 = arg11;
7133 /* No identical multiplicands; see if we can find a common
7134 power-of-two factor in non-power-of-two multiplies. This
7135 can help in multi-dimensional array access. */
7136 else if (host_integerp (arg01, 0)
7137 && host_integerp (arg11, 0))
7139 HOST_WIDE_INT int01, int11, tmp;
7140 bool swap = false;
7141 tree maybe_same;
7142 int01 = TREE_INT_CST_LOW (arg01);
7143 int11 = TREE_INT_CST_LOW (arg11);
7145 /* Move min of absolute values to int11. */
7146 if (absu_hwi (int01) < absu_hwi (int11))
7148 tmp = int01, int01 = int11, int11 = tmp;
7149 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7150 maybe_same = arg01;
7151 swap = true;
7153 else
7154 maybe_same = arg11;
7156 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7157 /* The remainder should not be a constant, otherwise we
7158 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7159 increase the number of multiplications necessary. */
7160 && TREE_CODE (arg10) != INTEGER_CST)
7162 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7163 build_int_cst (TREE_TYPE (arg00),
7164 int01 / int11));
7165 alt1 = arg10;
7166 same = maybe_same;
7167 if (swap)
7168 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7172 if (same)
7173 return fold_build2_loc (loc, MULT_EXPR, type,
7174 fold_build2_loc (loc, code, type,
7175 fold_convert_loc (loc, type, alt0),
7176 fold_convert_loc (loc, type, alt1)),
7177 fold_convert_loc (loc, type, same));
7179 return NULL_TREE;
7182 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7183 specified by EXPR into the buffer PTR of length LEN bytes.
7184 Return the number of bytes placed in the buffer, or zero
7185 upon failure. */
7187 static int
7188 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7190 tree type = TREE_TYPE (expr);
7191 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7192 int byte, offset, word, words;
7193 unsigned char value;
7195 if (total_bytes > len)
7196 return 0;
7197 words = total_bytes / UNITS_PER_WORD;
7199 for (byte = 0; byte < total_bytes; byte++)
7201 int bitpos = byte * BITS_PER_UNIT;
7202 if (bitpos < HOST_BITS_PER_WIDE_INT)
7203 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7204 else
7205 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7206 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7208 if (total_bytes > UNITS_PER_WORD)
7210 word = byte / UNITS_PER_WORD;
7211 if (WORDS_BIG_ENDIAN)
7212 word = (words - 1) - word;
7213 offset = word * UNITS_PER_WORD;
7214 if (BYTES_BIG_ENDIAN)
7215 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7216 else
7217 offset += byte % UNITS_PER_WORD;
7219 else
7220 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7221 ptr[offset] = value;
7223 return total_bytes;
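/* A minimal host-side sketch of the byte image produced above for a
   32-bit constant on a little-endian target, assuming a matching
   little-endian host; illustrative only, not part of the original
   source.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned int v = 0x11223344;
  unsigned char buf[4];
  int i;

  memcpy (buf, &v, sizeof buf);	/* Host byte order.  */
  for (i = 0; i < 4; i++)
    printf ("%02x ", buf[i]);	/* Little endian: 44 33 22 11.  */
  return 0;
}
#endif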
7227 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7228 specified by EXPR into the buffer PTR of length LEN bytes.
7229 Return the number of bytes placed in the buffer, or zero
7230 upon failure. */
7232 static int
7233 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7235 tree type = TREE_TYPE (expr);
7236 enum machine_mode mode = TYPE_MODE (type);
7237 int total_bytes = GET_MODE_SIZE (mode);
7238 FIXED_VALUE_TYPE value;
7239 tree i_value, i_type;
7241 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7242 return 0;
7244 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7246 if (NULL_TREE == i_type
7247 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7248 return 0;
7250 value = TREE_FIXED_CST (expr);
7251 i_value = double_int_to_tree (i_type, value.data);
7253 return native_encode_int (i_value, ptr, len);
7257 /* Subroutine of native_encode_expr. Encode the REAL_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero
7260 upon failure. */
7262 static int
7263 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7265 tree type = TREE_TYPE (expr);
7266 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7267 int byte, offset, word, words, bitpos;
7268 unsigned char value;
7270 /* There are always 32 bits in each long, no matter the size of
7271 the host's long. We handle floating point representations with
7272 up to 192 bits. */
7273 long tmp[6];
7275 if (total_bytes > len)
7276 return 0;
7277 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7279 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7281 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7282 bitpos += BITS_PER_UNIT)
7284 byte = (bitpos / BITS_PER_UNIT) & 3;
7285 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7287 if (UNITS_PER_WORD < 4)
7289 word = byte / UNITS_PER_WORD;
7290 if (WORDS_BIG_ENDIAN)
7291 word = (words - 1) - word;
7292 offset = word * UNITS_PER_WORD;
7293 if (BYTES_BIG_ENDIAN)
7294 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7295 else
7296 offset += byte % UNITS_PER_WORD;
7298 else
7299 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7300 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7302 return total_bytes;
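/* Editorial note (not part of GCC): since real_to_target always delivers
   the image in 32-bit chunks, the loop above only has to map those chunks
   onto the target byte order; e.g. on a little-endian target with
   UNITS_PER_WORD >= 4 it reduces to ptr[k] = tmp[k / 4] >> ((k % 4) * 8)
   for each byte k.  */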
7305 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7306 specified by EXPR into the buffer PTR of length LEN bytes.
7307 Return the number of bytes placed in the buffer, or zero
7308 upon failure. */
7310 static int
7311 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7313 int rsize, isize;
7314 tree part;
7316 part = TREE_REALPART (expr);
7317 rsize = native_encode_expr (part, ptr, len);
7318 if (rsize == 0)
7319 return 0;
7320 part = TREE_IMAGPART (expr);
7321 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7322 if (isize != rsize)
7323 return 0;
7324 return rsize + isize;
7328 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7329 specified by EXPR into the buffer PTR of length LEN bytes.
7330 Return the number of bytes placed in the buffer, or zero
7331 upon failure. */
7333 static int
7334 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7336 unsigned i, count;
7337 int size, offset;
7338 tree itype, elem;
7340 offset = 0;
7341 count = VECTOR_CST_NELTS (expr);
7342 itype = TREE_TYPE (TREE_TYPE (expr));
7343 size = GET_MODE_SIZE (TYPE_MODE (itype));
7344 for (i = 0; i < count; i++)
7346 elem = VECTOR_CST_ELT (expr, i);
7347 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7348 return 0;
7349 offset += size;
7351 return offset;
7355 /* Subroutine of native_encode_expr. Encode the STRING_CST
7356 specified by EXPR into the buffer PTR of length LEN bytes.
7357 Return the number of bytes placed in the buffer, or zero
7358 upon failure. */
7360 static int
7361 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7363 tree type = TREE_TYPE (expr);
7364 HOST_WIDE_INT total_bytes;
7366 if (TREE_CODE (type) != ARRAY_TYPE
7367 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7368 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7369 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7370 return 0;
7371 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7372 if (total_bytes > len)
7373 return 0;
7374 if (TREE_STRING_LENGTH (expr) < total_bytes)
7376 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7377 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7378 total_bytes - TREE_STRING_LENGTH (expr));
7380 else
7381 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7382 return total_bytes;
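/* Editorial example (not part of GCC): for char buf[8] = "ab",
   TYPE_SIZE_UNIT is 8 while TREE_STRING_LENGTH is 3 ("ab" plus the
   terminating NUL), so the code above copies three bytes and zero-fills
   the remaining five.  */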
7386 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7387 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7388 buffer PTR of length LEN bytes. Return the number of bytes
7389 placed in the buffer, or zero upon failure. */
7391 int
7392 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7394 switch (TREE_CODE (expr))
7396 case INTEGER_CST:
7397 return native_encode_int (expr, ptr, len);
7399 case REAL_CST:
7400 return native_encode_real (expr, ptr, len);
7402 case FIXED_CST:
7403 return native_encode_fixed (expr, ptr, len);
7405 case COMPLEX_CST:
7406 return native_encode_complex (expr, ptr, len);
7408 case VECTOR_CST:
7409 return native_encode_vector (expr, ptr, len);
7411 case STRING_CST:
7412 return native_encode_string (expr, ptr, len);
7414 default:
7415 return 0;
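/* Editorial sketch (not part of GCC, compiled out): a minimal round trip
   through the encode/interpret pair; the function name is hypothetical.  */
#if 0
static tree
example_native_roundtrip (tree cst)
{
  unsigned char buf[64];	/* Matches the limit in fold_view_convert_expr.  */
  int len = native_encode_expr (cst, buf, sizeof (buf));

  if (len == 0)
    return NULL_TREE;		/* This constant cannot be byte-encoded.  */

  /* Reinterpret the same bytes; using the original type gives back an
     equivalent constant.  */
  return native_interpret_expr (TREE_TYPE (cst), buf, len);
}
#endif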
7420 /* Subroutine of native_interpret_expr. Interpret the contents of
7421 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7422 If the buffer cannot be interpreted, return NULL_TREE. */
7424 static tree
7425 native_interpret_int (tree type, const unsigned char *ptr, int len)
7427 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7428 double_int result;
7430 if (total_bytes > len
7431 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7432 return NULL_TREE;
7434 result = double_int::from_buffer (ptr, total_bytes);
7436 return double_int_to_tree (type, result);
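/* Editorial example (not part of GCC): the bytes {0x44, 0x33, 0x22, 0x11}
   interpreted as a 32-bit INTEGER_CST on a little-endian target yield
   0x11223344, the exact inverse of native_encode_int above; the byte-order
   handling lives inside double_int::from_buffer.  */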
7440 /* Subroutine of native_interpret_expr. Interpret the contents of
7441 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7442 If the buffer cannot be interpreted, return NULL_TREE. */
7444 static tree
7445 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7447 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7448 double_int result;
7449 FIXED_VALUE_TYPE fixed_value;
7451 if (total_bytes > len
7452 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7453 return NULL_TREE;
7455 result = double_int::from_buffer (ptr, total_bytes);
7456 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7458 return build_fixed (type, fixed_value);
7462 /* Subroutine of native_interpret_expr. Interpret the contents of
7463 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7464 If the buffer cannot be interpreted, return NULL_TREE. */
7466 static tree
7467 native_interpret_real (tree type, const unsigned char *ptr, int len)
7469 enum machine_mode mode = TYPE_MODE (type);
7470 int total_bytes = GET_MODE_SIZE (mode);
7471 int byte, offset, word, words, bitpos;
7472 unsigned char value;
7473 /* There are always 32 bits in each long, no matter the size of
7474 the host's long. We handle floating point representations with
7475 up to 192 bits. */
7476 REAL_VALUE_TYPE r;
7477 long tmp[6];
7479 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7480 if (total_bytes > len || total_bytes > 24)
7481 return NULL_TREE;
7482 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7484 memset (tmp, 0, sizeof (tmp));
7485 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7486 bitpos += BITS_PER_UNIT)
7488 byte = (bitpos / BITS_PER_UNIT) & 3;
7489 if (UNITS_PER_WORD < 4)
7491 word = byte / UNITS_PER_WORD;
7492 if (WORDS_BIG_ENDIAN)
7493 word = (words - 1) - word;
7494 offset = word * UNITS_PER_WORD;
7495 if (BYTES_BIG_ENDIAN)
7496 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7497 else
7498 offset += byte % UNITS_PER_WORD;
7500 else
7501 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7502 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7504 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7507 real_from_target (&r, tmp, mode);
7508 return build_real (type, r);
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7516 static tree
7517 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7519 tree etype, rpart, ipart;
7520 int size;
7522 etype = TREE_TYPE (type);
7523 size = GET_MODE_SIZE (TYPE_MODE (etype));
7524 if (size * 2 > len)
7525 return NULL_TREE;
7526 rpart = native_interpret_expr (etype, ptr, size);
7527 if (!rpart)
7528 return NULL_TREE;
7529 ipart = native_interpret_expr (etype, ptr+size, size);
7530 if (!ipart)
7531 return NULL_TREE;
7532 return build_complex (type, rpart, ipart);
7536 /* Subroutine of native_interpret_expr. Interpret the contents of
7537 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7538 If the buffer cannot be interpreted, return NULL_TREE. */
7540 static tree
7541 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7543 tree etype, elem;
7544 int i, size, count;
7545 tree *elements;
7547 etype = TREE_TYPE (type);
7548 size = GET_MODE_SIZE (TYPE_MODE (etype));
7549 count = TYPE_VECTOR_SUBPARTS (type);
7550 if (size * count > len)
7551 return NULL_TREE;
7553 elements = XALLOCAVEC (tree, count);
7554 for (i = count - 1; i >= 0; i--)
7556 elem = native_interpret_expr (etype, ptr+(i*size), size);
7557 if (!elem)
7558 return NULL_TREE;
7559 elements[i] = elem;
7561 return build_vector (type, elements);
7565 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7566 the buffer PTR of length LEN as a constant of type TYPE. For
7567 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7568 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7569 return NULL_TREE. */
7571 tree
7572 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7574 switch (TREE_CODE (type))
7576 case INTEGER_TYPE:
7577 case ENUMERAL_TYPE:
7578 case BOOLEAN_TYPE:
7579 case POINTER_TYPE:
7580 case REFERENCE_TYPE:
7581 return native_interpret_int (type, ptr, len);
7583 case REAL_TYPE:
7584 return native_interpret_real (type, ptr, len);
7586 case FIXED_POINT_TYPE:
7587 return native_interpret_fixed (type, ptr, len);
7589 case COMPLEX_TYPE:
7590 return native_interpret_complex (type, ptr, len);
7592 case VECTOR_TYPE:
7593 return native_interpret_vector (type, ptr, len);
7595 default:
7596 return NULL_TREE;
7600 /* Returns true if we can interpret the contents of a native encoding
7601 as TYPE. */
7603 static bool
7604 can_native_interpret_type_p (tree type)
7606 switch (TREE_CODE (type))
7608 case INTEGER_TYPE:
7609 case ENUMERAL_TYPE:
7610 case BOOLEAN_TYPE:
7611 case POINTER_TYPE:
7612 case REFERENCE_TYPE:
7613 case FIXED_POINT_TYPE:
7614 case REAL_TYPE:
7615 case COMPLEX_TYPE:
7616 case VECTOR_TYPE:
7617 return true;
7618 default:
7619 return false;
7623 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7624 TYPE at compile-time. If we're unable to perform the conversion
7625 return NULL_TREE. */
7627 static tree
7628 fold_view_convert_expr (tree type, tree expr)
7630 /* We support up to 512-bit values (for V8DFmode). */
7631 unsigned char buffer[64];
7632 int len;
7634 /* Check that the host and target are sane. */
7635 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7636 return NULL_TREE;
7638 len = native_encode_expr (expr, buffer, sizeof (buffer));
7639 if (len == 0)
7640 return NULL_TREE;
7642 return native_interpret_expr (type, buffer, len);
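/* Editorial example (not part of GCC): this is what folds
   VIEW_CONVERT_EXPR<int>(1.0f) at compile time: the REAL_CST 1.0f encodes
   to the IEEE single-precision bytes {0x00, 0x00, 0x80, 0x3f} on a
   little-endian target, which re-interpret as the INTEGER_CST
   0x3f800000.  */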
7645 /* Build an expression for the address of T. Folds away INDIRECT_REF
7646 to avoid confusing the gimplify process. */
7648 tree
7649 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7651 /* The size of the object is not relevant when talking about its address. */
7652 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7653 t = TREE_OPERAND (t, 0);
7655 if (TREE_CODE (t) == INDIRECT_REF)
7657 t = TREE_OPERAND (t, 0);
7659 if (TREE_TYPE (t) != ptrtype)
7660 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7662 else if (TREE_CODE (t) == MEM_REF
7663 && integer_zerop (TREE_OPERAND (t, 1)))
7664 return TREE_OPERAND (t, 0);
7665 else if (TREE_CODE (t) == MEM_REF
7666 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7667 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7668 TREE_OPERAND (t, 0),
7669 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7670 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7672 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7674 if (TREE_TYPE (t) != ptrtype)
7675 t = fold_convert_loc (loc, ptrtype, t);
7677 else
7678 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7680 return t;
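/* Editorial examples (not part of GCC) of the folds above: &*p becomes p
   (with a cast when the pointer types differ), &MEM_REF[p, 0] becomes p,
   and &VIEW_CONVERT_EXPR<T>(x) is rebuilt as a converted &x, so the
   gimplifier never sees the address of a dereference.  */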
7683 /* Build an expression for the address of T. */
7685 tree
7686 build_fold_addr_expr_loc (location_t loc, tree t)
7688 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7690 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7693 static bool vec_cst_ctor_to_array (tree, tree *);
7695 /* Fold a unary expression of code CODE and type TYPE with operand
7696 OP0. Return the folded expression if folding is successful.
7697 Otherwise, return NULL_TREE. */
7699 tree
7700 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7702 tree tem;
7703 tree arg0;
7704 enum tree_code_class kind = TREE_CODE_CLASS (code);
7706 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7707 && TREE_CODE_LENGTH (code) == 1);
7709 arg0 = op0;
7710 if (arg0)
7712 if (CONVERT_EXPR_CODE_P (code)
7713 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7715 /* Don't use STRIP_NOPS, because signedness of argument type
7716 matters. */
7717 STRIP_SIGN_NOPS (arg0);
7719 else
7721 /* Strip any conversions that don't change the mode. This
7722 is safe for every expression, except for a comparison
7723 expression because its signedness is derived from its
7724 operands.
7726 Note that this is done as an internal manipulation within
7727 the constant folder, in order to find the simplest
7728 representation of the arguments so that their form can be
7729 studied. In any case, the appropriate type conversions
7730 should be put back in the tree that will get out of the
7731 constant folder. */
7732 STRIP_NOPS (arg0);
7736 if (TREE_CODE_CLASS (code) == tcc_unary)
7738 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7739 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7740 fold_build1_loc (loc, code, type,
7741 fold_convert_loc (loc, TREE_TYPE (op0),
7742 TREE_OPERAND (arg0, 1))));
7743 else if (TREE_CODE (arg0) == COND_EXPR)
7745 tree arg01 = TREE_OPERAND (arg0, 1);
7746 tree arg02 = TREE_OPERAND (arg0, 2);
7747 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7748 arg01 = fold_build1_loc (loc, code, type,
7749 fold_convert_loc (loc,
7750 TREE_TYPE (op0), arg01));
7751 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7752 arg02 = fold_build1_loc (loc, code, type,
7753 fold_convert_loc (loc,
7754 TREE_TYPE (op0), arg02));
7755 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7756 arg01, arg02);
7758 /* If this was a conversion, and all we did was to move it
7759 inside the COND_EXPR, bring it back out. But leave it if
7760 it is a conversion from integer to integer and the
7761 result precision is no wider than a word since such a
7762 conversion is cheap and may be optimized away by combine,
7763 while it couldn't if it were outside the COND_EXPR. Then return
7764 so we don't get into an infinite recursion loop taking the
7765 conversion out and then back in. */
7767 if ((CONVERT_EXPR_CODE_P (code)
7768 || code == NON_LVALUE_EXPR)
7769 && TREE_CODE (tem) == COND_EXPR
7770 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7771 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7772 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7773 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7774 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7775 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7776 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7777 && (INTEGRAL_TYPE_P
7778 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7779 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7780 || flag_syntax_only))
7781 tem = build1_loc (loc, code, type,
7782 build3 (COND_EXPR,
7783 TREE_TYPE (TREE_OPERAND
7784 (TREE_OPERAND (tem, 1), 0)),
7785 TREE_OPERAND (tem, 0),
7786 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7787 TREE_OPERAND (TREE_OPERAND (tem, 2),
7788 0)));
7789 return tem;
7793 switch (code)
7795 case PAREN_EXPR:
7796 /* Re-association barriers around constants and other re-association
7797 barriers can be removed. */
7798 if (CONSTANT_CLASS_P (op0)
7799 || TREE_CODE (op0) == PAREN_EXPR)
7800 return fold_convert_loc (loc, type, op0);
7801 return NULL_TREE;
7803 CASE_CONVERT:
7804 case FLOAT_EXPR:
7805 case FIX_TRUNC_EXPR:
7806 if (TREE_TYPE (op0) == type)
7807 return op0;
7809 if (COMPARISON_CLASS_P (op0))
7811 /* If we have (type) (a CMP b) and type is an integral type, return
7812 new expression involving the new type. Canonicalize
7813 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7814 non-integral type.
7815 Do not fold the result, as that would not simplify further;
7816 folding it again also results in infinite recursion. */
7817 if (TREE_CODE (type) == BOOLEAN_TYPE)
7818 return build2_loc (loc, TREE_CODE (op0), type,
7819 TREE_OPERAND (op0, 0),
7820 TREE_OPERAND (op0, 1));
7821 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7822 && TREE_CODE (type) != VECTOR_TYPE)
7823 return build3_loc (loc, COND_EXPR, type, op0,
7824 constant_boolean_node (true, type),
7825 constant_boolean_node (false, type));
7828 /* Handle cases of two conversions in a row. */
7829 if (CONVERT_EXPR_P (op0))
7831 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7832 tree inter_type = TREE_TYPE (op0);
7833 int inside_int = INTEGRAL_TYPE_P (inside_type);
7834 int inside_ptr = POINTER_TYPE_P (inside_type);
7835 int inside_float = FLOAT_TYPE_P (inside_type);
7836 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7837 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7838 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7839 int inter_int = INTEGRAL_TYPE_P (inter_type);
7840 int inter_ptr = POINTER_TYPE_P (inter_type);
7841 int inter_float = FLOAT_TYPE_P (inter_type);
7842 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7843 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7844 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7845 int final_int = INTEGRAL_TYPE_P (type);
7846 int final_ptr = POINTER_TYPE_P (type);
7847 int final_float = FLOAT_TYPE_P (type);
7848 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7849 unsigned int final_prec = TYPE_PRECISION (type);
7850 int final_unsignedp = TYPE_UNSIGNED (type);
7852 /* In addition to the cases of two conversions in a row
7853 handled below, if we are converting something to its own
7854 type via an object of identical or wider precision, neither
7855 conversion is needed. */
7856 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7857 && (((inter_int || inter_ptr) && final_int)
7858 || (inter_float && final_float))
7859 && inter_prec >= final_prec)
7860 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7862 /* Likewise, if the intermediate and initial types are either both
7863 float or both integer, we don't need the middle conversion if the
7864 former is wider than the latter and doesn't change the signedness
7865 (for integers). Avoid this if the final type is a pointer since
7866 then we sometimes need the middle conversion. Likewise if the
7867 final type has a precision not equal to the size of its mode. */
7868 if (((inter_int && inside_int)
7869 || (inter_float && inside_float)
7870 || (inter_vec && inside_vec))
7871 && inter_prec >= inside_prec
7872 && (inter_float || inter_vec
7873 || inter_unsignedp == inside_unsignedp)
7874 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7875 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7876 && ! final_ptr
7877 && (! final_vec || inter_prec == inside_prec))
7878 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7880 /* If we have a sign-extension of a zero-extended value, we can
7881 replace that by a single zero-extension. Likewise if the
7882 final conversion does not change precision we can drop the
7883 intermediate conversion. */
7884 if (inside_int && inter_int && final_int
7885 && ((inside_prec < inter_prec && inter_prec < final_prec
7886 && inside_unsignedp && !inter_unsignedp)
7887 || final_prec == inter_prec))
7888 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7890 /* Two conversions in a row are not needed unless:
7891 - some conversion is floating-point (overstrict for now), or
7892 - some conversion is a vector (overstrict for now), or
7893 - the intermediate type is narrower than both initial and
7894 final, or
7895 - the intermediate type and innermost type differ in signedness,
7896 and the outermost type is wider than the intermediate, or
7897 - the initial type is a pointer type and the precisions of the
7898 intermediate and final types differ, or
7899 - the final type is a pointer type and the precisions of the
7900 initial and intermediate types differ. */
7901 if (! inside_float && ! inter_float && ! final_float
7902 && ! inside_vec && ! inter_vec && ! final_vec
7903 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7904 && ! (inside_int && inter_int
7905 && inter_unsignedp != inside_unsignedp
7906 && inter_prec < final_prec)
7907 && ((inter_unsignedp && inter_prec > inside_prec)
7908 == (final_unsignedp && final_prec > inter_prec))
7909 && ! (inside_ptr && inter_prec != final_prec)
7910 && ! (final_ptr && inside_prec != inter_prec)
7911 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7912 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7913 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
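/* Editorial examples (not part of GCC) for the rules above, assuming
   8-bit char, 16-bit short and 32-bit int:
   (int) (unsigned short) (unsigned char) x drops the middle conversion,
   leaving (int) (unsigned char) x, and (short) (int) x where x is itself
   a short collapses to x, since converting a value to its own type via a
   wider intermediate needs neither conversion.  */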
7916 /* Handle (T *)&A.B.C for A being of type T and B and C
7917 living at offset zero. This occurs frequently in
7918 C++ upcasting and then accessing the base. */
7919 if (TREE_CODE (op0) == ADDR_EXPR
7920 && POINTER_TYPE_P (type)
7921 && handled_component_p (TREE_OPERAND (op0, 0)))
7923 HOST_WIDE_INT bitsize, bitpos;
7924 tree offset;
7925 enum machine_mode mode;
7926 int unsignedp, volatilep;
7927 tree base = TREE_OPERAND (op0, 0);
7928 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7929 &mode, &unsignedp, &volatilep, false);
7930 /* If the reference was to a (constant) zero offset, we can use
7931 the address of the base if it has the same base type
7932 as the result type and the pointer type is unqualified. */
7933 if (! offset && bitpos == 0
7934 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7935 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7936 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7937 return fold_convert_loc (loc, type,
7938 build_fold_addr_expr_loc (loc, base));
7941 if (TREE_CODE (op0) == MODIFY_EXPR
7942 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7943 /* Detect assigning a bitfield. */
7944 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7945 && DECL_BIT_FIELD
7946 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7948 /* Don't leave an assignment inside a conversion
7949 unless assigning a bitfield. */
7950 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7951 /* First do the assignment, then return converted constant. */
7952 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7953 TREE_NO_WARNING (tem) = 1;
7954 TREE_USED (tem) = 1;
7955 return tem;
7958 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7959 constant (if x has signed type, the sign bit cannot be set
7960 in c). This folds extension into the BIT_AND_EXPR.
7961 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7962 very likely don't have maximal range for their precision and this
7963 transformation effectively doesn't preserve non-maximal ranges. */
7964 if (TREE_CODE (type) == INTEGER_TYPE
7965 && TREE_CODE (op0) == BIT_AND_EXPR
7966 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7968 tree and_expr = op0;
7969 tree and0 = TREE_OPERAND (and_expr, 0);
7970 tree and1 = TREE_OPERAND (and_expr, 1);
7971 int change = 0;
7973 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7974 || (TYPE_PRECISION (type)
7975 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7976 change = 1;
7977 else if (TYPE_PRECISION (TREE_TYPE (and1))
7978 <= HOST_BITS_PER_WIDE_INT
7979 && host_integerp (and1, 1))
7981 unsigned HOST_WIDE_INT cst;
7983 cst = tree_low_cst (and1, 1);
7984 cst &= (HOST_WIDE_INT) -1
7985 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7986 change = (cst == 0);
7987 #ifdef LOAD_EXTEND_OP
7988 if (change
7989 && !flag_syntax_only
7990 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7991 == ZERO_EXTEND))
7993 tree uns = unsigned_type_for (TREE_TYPE (and0));
7994 and0 = fold_convert_loc (loc, uns, and0);
7995 and1 = fold_convert_loc (loc, uns, and1);
7997 #endif
7999 if (change)
8001 tem = force_fit_type_double (type, tree_to_double_int (and1),
8002 0, TREE_OVERFLOW (and1));
8003 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8004 fold_convert_loc (loc, type, and0), tem);
8008 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8009 when one of the new casts will fold away. Conservatively we assume
8010 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8011 if (POINTER_TYPE_P (type)
8012 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8013 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8014 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8015 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8016 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8018 tree arg00 = TREE_OPERAND (arg0, 0);
8019 tree arg01 = TREE_OPERAND (arg0, 1);
8021 return fold_build_pointer_plus_loc
8022 (loc, fold_convert_loc (loc, type, arg00), arg01);
8025 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8026 of the same precision, and X has an integer type not narrower
8027 than T1 or T2, i.e. the cast (T2)X isn't an extension. */
8028 if (INTEGRAL_TYPE_P (type)
8029 && TREE_CODE (op0) == BIT_NOT_EXPR
8030 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8031 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8032 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8034 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8035 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8036 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8037 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8038 fold_convert_loc (loc, type, tem));
8041 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8042 type of X and Y (integer types only). */
8043 if (INTEGRAL_TYPE_P (type)
8044 && TREE_CODE (op0) == MULT_EXPR
8045 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8046 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8048 /* Be careful not to introduce new overflows. */
8049 tree mult_type;
8050 if (TYPE_OVERFLOW_WRAPS (type))
8051 mult_type = type;
8052 else
8053 mult_type = unsigned_type_for (type);
8055 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8057 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8058 fold_convert_loc (loc, mult_type,
8059 TREE_OPERAND (op0, 0)),
8060 fold_convert_loc (loc, mult_type,
8061 TREE_OPERAND (op0, 1)));
8062 return fold_convert_loc (loc, type, tem);
8066 tem = fold_convert_const (code, type, op0);
8067 return tem ? tem : NULL_TREE;
8069 case ADDR_SPACE_CONVERT_EXPR:
8070 if (integer_zerop (arg0))
8071 return fold_convert_const (code, type, arg0);
8072 return NULL_TREE;
8074 case FIXED_CONVERT_EXPR:
8075 tem = fold_convert_const (code, type, arg0);
8076 return tem ? tem : NULL_TREE;
8078 case VIEW_CONVERT_EXPR:
8079 if (TREE_TYPE (op0) == type)
8080 return op0;
8081 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8082 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8083 type, TREE_OPERAND (op0, 0));
8084 if (TREE_CODE (op0) == MEM_REF)
8085 return fold_build2_loc (loc, MEM_REF, type,
8086 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8088 /* For integral conversions with the same precision or pointer
8089 conversions use a NOP_EXPR instead. */
8090 if ((INTEGRAL_TYPE_P (type)
8091 || POINTER_TYPE_P (type))
8092 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8093 || POINTER_TYPE_P (TREE_TYPE (op0)))
8094 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8095 return fold_convert_loc (loc, type, op0);
8097 /* Strip inner integral conversions that do not change the precision. */
8098 if (CONVERT_EXPR_P (op0)
8099 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8100 || POINTER_TYPE_P (TREE_TYPE (op0)))
8101 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8102 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8103 && (TYPE_PRECISION (TREE_TYPE (op0))
8104 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8105 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8106 type, TREE_OPERAND (op0, 0));
8108 return fold_view_convert_expr (type, op0);
8110 case NEGATE_EXPR:
8111 tem = fold_negate_expr (loc, arg0);
8112 if (tem)
8113 return fold_convert_loc (loc, type, tem);
8114 return NULL_TREE;
8116 case ABS_EXPR:
8117 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8118 return fold_abs_const (arg0, type);
8119 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8120 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8121 /* Convert fabs((double)float) into (double)fabsf(float). */
8122 else if (TREE_CODE (arg0) == NOP_EXPR
8123 && TREE_CODE (type) == REAL_TYPE)
8125 tree targ0 = strip_float_extensions (arg0);
8126 if (targ0 != arg0)
8127 return fold_convert_loc (loc, type,
8128 fold_build1_loc (loc, ABS_EXPR,
8129 TREE_TYPE (targ0),
8130 targ0));
8132 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8133 else if (TREE_CODE (arg0) == ABS_EXPR)
8134 return arg0;
8135 else if (tree_expr_nonnegative_p (arg0))
8136 return arg0;
8138 /* Strip sign ops from argument. */
8139 if (TREE_CODE (type) == REAL_TYPE)
8141 tem = fold_strip_sign_ops (arg0);
8142 if (tem)
8143 return fold_build1_loc (loc, ABS_EXPR, type,
8144 fold_convert_loc (loc, type, tem));
8146 return NULL_TREE;
8148 case CONJ_EXPR:
8149 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8150 return fold_convert_loc (loc, type, arg0);
8151 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8153 tree itype = TREE_TYPE (type);
8154 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8155 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8156 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8157 negate_expr (ipart));
8159 if (TREE_CODE (arg0) == COMPLEX_CST)
8161 tree itype = TREE_TYPE (type);
8162 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8163 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8164 return build_complex (type, rpart, negate_expr (ipart));
8166 if (TREE_CODE (arg0) == CONJ_EXPR)
8167 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8168 return NULL_TREE;
8170 case BIT_NOT_EXPR:
8171 if (TREE_CODE (arg0) == INTEGER_CST)
8172 return fold_not_const (arg0, type);
8173 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8174 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8175 /* Convert ~ (-A) to A - 1. */
8176 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8177 return fold_build2_loc (loc, MINUS_EXPR, type,
8178 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8179 build_int_cst (type, 1));
8180 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8181 else if (INTEGRAL_TYPE_P (type)
8182 && ((TREE_CODE (arg0) == MINUS_EXPR
8183 && integer_onep (TREE_OPERAND (arg0, 1)))
8184 || (TREE_CODE (arg0) == PLUS_EXPR
8185 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8186 return fold_build1_loc (loc, NEGATE_EXPR, type,
8187 fold_convert_loc (loc, type,
8188 TREE_OPERAND (arg0, 0)));
8189 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8190 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8191 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8192 fold_convert_loc (loc, type,
8193 TREE_OPERAND (arg0, 0)))))
8194 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8195 fold_convert_loc (loc, type,
8196 TREE_OPERAND (arg0, 1)));
8197 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8198 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8199 fold_convert_loc (loc, type,
8200 TREE_OPERAND (arg0, 1)))))
8201 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8202 fold_convert_loc (loc, type,
8203 TREE_OPERAND (arg0, 0)), tem);
8204 /* Perform BIT_NOT_EXPR on each element individually. */
8205 else if (TREE_CODE (arg0) == VECTOR_CST)
8207 tree *elements;
8208 tree elem;
8209 unsigned count = VECTOR_CST_NELTS (arg0), i;
8211 elements = XALLOCAVEC (tree, count);
8212 for (i = 0; i < count; i++)
8214 elem = VECTOR_CST_ELT (arg0, i);
8215 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8216 if (elem == NULL_TREE)
8217 break;
8218 elements[i] = elem;
8220 if (i == count)
8221 return build_vector (type, elements);
8224 return NULL_TREE;
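/* Editorial examples (not part of GCC) of the BIT_NOT_EXPR folds above:
   ~~x becomes x, ~(-x) becomes x - 1, ~(x - 1) becomes -x, and
   ~(x ^ C) becomes x ^ ~C whenever ~C folds to a constant.  */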
8226 case TRUTH_NOT_EXPR:
8227 /* The argument to invert_truthvalue must have Boolean type. */
8228 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8229 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8231 /* Note that the operand of this must be an int
8232 and its values must be 0 or 1.
8233 ("true" is a fixed value perhaps depending on the language,
8234 but we don't handle values other than 1 correctly yet.) */
8235 tem = fold_truth_not_expr (loc, arg0);
8236 if (!tem)
8237 return NULL_TREE;
8238 return fold_convert_loc (loc, type, tem);
8240 case REALPART_EXPR:
8241 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8242 return fold_convert_loc (loc, type, arg0);
8243 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8244 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8245 TREE_OPERAND (arg0, 1));
8246 if (TREE_CODE (arg0) == COMPLEX_CST)
8247 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8248 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8250 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8251 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8252 fold_build1_loc (loc, REALPART_EXPR, itype,
8253 TREE_OPERAND (arg0, 0)),
8254 fold_build1_loc (loc, REALPART_EXPR, itype,
8255 TREE_OPERAND (arg0, 1)));
8256 return fold_convert_loc (loc, type, tem);
8258 if (TREE_CODE (arg0) == CONJ_EXPR)
8260 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8261 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8262 TREE_OPERAND (arg0, 0));
8263 return fold_convert_loc (loc, type, tem);
8265 if (TREE_CODE (arg0) == CALL_EXPR)
8267 tree fn = get_callee_fndecl (arg0);
8268 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8269 switch (DECL_FUNCTION_CODE (fn))
8271 CASE_FLT_FN (BUILT_IN_CEXPI):
8272 fn = mathfn_built_in (type, BUILT_IN_COS);
8273 if (fn)
8274 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8275 break;
8277 default:
8278 break;
8281 return NULL_TREE;
8283 case IMAGPART_EXPR:
8284 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8285 return build_zero_cst (type);
8286 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8287 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8288 TREE_OPERAND (arg0, 0));
8289 if (TREE_CODE (arg0) == COMPLEX_CST)
8290 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8291 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8293 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8294 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8295 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8296 TREE_OPERAND (arg0, 0)),
8297 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8298 TREE_OPERAND (arg0, 1)));
8299 return fold_convert_loc (loc, type, tem);
8301 if (TREE_CODE (arg0) == CONJ_EXPR)
8303 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8304 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8305 return fold_convert_loc (loc, type, negate_expr (tem));
8307 if (TREE_CODE (arg0) == CALL_EXPR)
8309 tree fn = get_callee_fndecl (arg0);
8310 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8311 switch (DECL_FUNCTION_CODE (fn))
8313 CASE_FLT_FN (BUILT_IN_CEXPI):
8314 fn = mathfn_built_in (type, BUILT_IN_SIN);
8315 if (fn)
8316 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8317 break;
8319 default:
8320 break;
8323 return NULL_TREE;
8325 case INDIRECT_REF:
8326 /* Fold *&X to X if X is an lvalue. */
8327 if (TREE_CODE (op0) == ADDR_EXPR)
8329 tree op00 = TREE_OPERAND (op0, 0);
8330 if ((TREE_CODE (op00) == VAR_DECL
8331 || TREE_CODE (op00) == PARM_DECL
8332 || TREE_CODE (op00) == RESULT_DECL)
8333 && !TREE_READONLY (op00))
8334 return op00;
8336 return NULL_TREE;
8338 case VEC_UNPACK_LO_EXPR:
8339 case VEC_UNPACK_HI_EXPR:
8340 case VEC_UNPACK_FLOAT_LO_EXPR:
8341 case VEC_UNPACK_FLOAT_HI_EXPR:
8343 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8344 tree *elts;
8345 enum tree_code subcode;
8347 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8348 if (TREE_CODE (arg0) != VECTOR_CST)
8349 return NULL_TREE;
8351 elts = XALLOCAVEC (tree, nelts * 2);
8352 if (!vec_cst_ctor_to_array (arg0, elts))
8353 return NULL_TREE;
8355 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8356 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8357 elts += nelts;
8359 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8360 subcode = NOP_EXPR;
8361 else
8362 subcode = FLOAT_EXPR;
8364 for (i = 0; i < nelts; i++)
8366 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8367 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8368 return NULL_TREE;
8371 return build_vector (type, elts);
8374 case REDUC_MIN_EXPR:
8375 case REDUC_MAX_EXPR:
8376 case REDUC_PLUS_EXPR:
8378 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8379 tree *elts;
8380 enum tree_code subcode;
8382 if (TREE_CODE (op0) != VECTOR_CST)
8383 return NULL_TREE;
8385 elts = XALLOCAVEC (tree, nelts);
8386 if (!vec_cst_ctor_to_array (op0, elts))
8387 return NULL_TREE;
8389 switch (code)
8391 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8392 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8393 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8394 default: gcc_unreachable ();
8397 for (i = 1; i < nelts; i++)
8399 elts[0] = const_binop (subcode, elts[0], elts[i]);
8400 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8401 return NULL_TREE;
8402 elts[i] = build_zero_cst (TREE_TYPE (type));
8405 return build_vector (type, elts);
8408 default:
8409 return NULL_TREE;
8410 } /* switch (code) */
8414 /* If the operation was a conversion do _not_ mark a resulting constant
8415 with TREE_OVERFLOW if the original constant was not. These conversions
8416 have implementation-defined behavior, and retaining the TREE_OVERFLOW
8417 flag here would confuse later passes such as VRP. */
8418 tree
8419 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8420 tree type, tree op0)
8422 tree res = fold_unary_loc (loc, code, type, op0);
8423 if (res
8424 && TREE_CODE (res) == INTEGER_CST
8425 && TREE_CODE (op0) == INTEGER_CST
8426 && CONVERT_EXPR_CODE_P (code))
8427 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8429 return res;
8432 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8433 operands OP0 and OP1. LOC is the location of the resulting expression.
8434 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8435 Return the folded expression if folding is successful. Otherwise,
8436 return NULL_TREE. */
8437 static tree
8438 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8439 tree arg0, tree arg1, tree op0, tree op1)
8441 tree tem;
8443 /* We only do these simplifications if we are optimizing. */
8444 if (!optimize)
8445 return NULL_TREE;
8447 /* Check for things like (A || B) && (A || C). We can convert this
8448 to A || (B && C). Note that either operator can be any of the four
8449 truth and/or operations and the transformation will still be
8450 valid. Also note that we only care about order for the
8451 ANDIF and ORIF operators. If B contains side effects, this
8452 might change the truth-value of A. */
8453 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8454 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8455 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8456 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8457 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8458 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8460 tree a00 = TREE_OPERAND (arg0, 0);
8461 tree a01 = TREE_OPERAND (arg0, 1);
8462 tree a10 = TREE_OPERAND (arg1, 0);
8463 tree a11 = TREE_OPERAND (arg1, 1);
8464 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8465 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8466 && (code == TRUTH_AND_EXPR
8467 || code == TRUTH_OR_EXPR));
8469 if (operand_equal_p (a00, a10, 0))
8470 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8471 fold_build2_loc (loc, code, type, a01, a11));
8472 else if (commutative && operand_equal_p (a00, a11, 0))
8473 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8474 fold_build2_loc (loc, code, type, a01, a10));
8475 else if (commutative && operand_equal_p (a01, a10, 0))
8476 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8477 fold_build2_loc (loc, code, type, a00, a11));
8479 /* This case is tricky because we must either have commutative
8480 operators or else A10 must not have side-effects. */
8482 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8483 && operand_equal_p (a01, a11, 0))
8484 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8485 fold_build2_loc (loc, code, type, a00, a10),
8486 a01);
8489 /* See if we can build a range comparison. */
8490 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8491 return tem;
8493 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8494 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8496 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8497 if (tem)
8498 return fold_build2_loc (loc, code, type, tem, arg1);
8501 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8502 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8504 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8505 if (tem)
8506 return fold_build2_loc (loc, code, type, arg0, tem);
8509 /* Check for the possibility of merging component references. If our
8510 lhs is another similar operation, try to merge its rhs with our
8511 rhs. Then try to merge our lhs and rhs. */
8512 if (TREE_CODE (arg0) == code
8513 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8514 TREE_OPERAND (arg0, 1), arg1)))
8515 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8517 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8518 return tem;
8520 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8521 && (code == TRUTH_AND_EXPR
8522 || code == TRUTH_ANDIF_EXPR
8523 || code == TRUTH_OR_EXPR
8524 || code == TRUTH_ORIF_EXPR))
8526 enum tree_code ncode, icode;
8528 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8529 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8530 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8532 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8533 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8534 We don't want to pack more than two leafs to a non-IF AND/OR
8535 expression.
8536 If the tree code of the left-hand operand isn't an AND/OR-IF code
8537 and isn't equal to IF-CODE, then we don't want to add the
8538 right-hand operand. If the inner right-hand side of the
8539 left-hand operand has side-effects, or isn't simple, then
8540 we can't add to it, as otherwise we might destroy the if-sequence. */
8541 if (TREE_CODE (arg0) == icode
8542 && simple_operand_p_2 (arg1)
8543 /* Needed for sequence points to handle trappings, and
8544 side-effects. */
8545 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8547 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8548 arg1);
8549 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8550 tem);
8552 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8553 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8554 else if (TREE_CODE (arg1) == icode
8555 && simple_operand_p_2 (arg0)
8556 /* Needed for sequence points to handle trappings, and
8557 side-effects. */
8558 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8560 tem = fold_build2_loc (loc, ncode, type,
8561 arg0, TREE_OPERAND (arg1, 0));
8562 return fold_build2_loc (loc, icode, type, tem,
8563 TREE_OPERAND (arg1, 1));
8565 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8566 into (A OR B).
8567 For sequence point consistency, we need to check for trapping,
8568 and side-effects. */
8569 else if (code == icode && simple_operand_p_2 (arg0)
8570 && simple_operand_p_2 (arg1))
8571 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8574 return NULL_TREE;
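/* Editorial examples (not part of GCC): the distribution rule above
   rewrites (a || b) && (a || c) as a || (b && c), and when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, (a && b) && c is re-packed as
   a && (b AND c) with a non-short-circuit inner TRUTH_AND_EXPR,
   provided b and c are simple and free of side effects.  */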
8577 /* Fold a binary expression of code CODE and type TYPE with operands
8578 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8579 Return the folded expression if folding is successful. Otherwise,
8580 return NULL_TREE. */
8582 static tree
8583 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8585 enum tree_code compl_code;
8587 if (code == MIN_EXPR)
8588 compl_code = MAX_EXPR;
8589 else if (code == MAX_EXPR)
8590 compl_code = MIN_EXPR;
8591 else
8592 gcc_unreachable ();
8594 /* MIN (MAX (a, b), b) == b. */
8595 if (TREE_CODE (op0) == compl_code
8596 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8597 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8599 /* MIN (MAX (b, a), b) == b. */
8600 if (TREE_CODE (op0) == compl_code
8601 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8602 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8603 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8605 /* MIN (a, MAX (a, b)) == a. */
8606 if (TREE_CODE (op1) == compl_code
8607 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8608 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8609 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8611 /* MIN (a, MAX (b, a)) == a. */
8612 if (TREE_CODE (op1) == compl_code
8613 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8614 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8615 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8617 return NULL_TREE;
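/* Editorial examples (not part of GCC): by the rules above
   MIN (MAX (a, b), b) folds to b and MIN (a, MAX (a, b)) folds to a;
   the MAX-of-MIN cases are handled by the same code with COMPL_CODE
   playing the opposite role.  */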
8620 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8621 by changing CODE to reduce the magnitude of constants involved in
8622 ARG0 of the comparison.
8623 Returns a canonicalized comparison tree if a simplification was
8624 possible, otherwise returns NULL_TREE.
8625 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8626 valid if signed overflow is undefined. */
8628 static tree
8629 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8630 tree arg0, tree arg1,
8631 bool *strict_overflow_p)
8633 enum tree_code code0 = TREE_CODE (arg0);
8634 tree t, cst0 = NULL_TREE;
8635 int sgn0;
8636 bool swap = false;
8638 /* Match A +- CST code arg1 and CST code arg1. We can change the
8639 first form only if overflow is undefined. */
8640 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8641 /* In principle pointers also have undefined overflow behavior,
8642 but that causes problems elsewhere. */
8643 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8644 && (code0 == MINUS_EXPR
8645 || code0 == PLUS_EXPR)
8646 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8647 || code0 == INTEGER_CST))
8648 return NULL_TREE;
8650 /* Identify the constant in arg0 and its sign. */
8651 if (code0 == INTEGER_CST)
8652 cst0 = arg0;
8653 else
8654 cst0 = TREE_OPERAND (arg0, 1);
8655 sgn0 = tree_int_cst_sgn (cst0);
8657 /* Overflowed constants and zero will cause problems. */
8658 if (integer_zerop (cst0)
8659 || TREE_OVERFLOW (cst0))
8660 return NULL_TREE;
8662 /* See if we can reduce the magnitude of the constant in
8663 arg0 by changing the comparison code. */
8664 if (code0 == INTEGER_CST)
8666 /* CST <= arg1 -> CST-1 < arg1. */
8667 if (code == LE_EXPR && sgn0 == 1)
8668 code = LT_EXPR;
8669 /* -CST < arg1 -> -CST-1 <= arg1. */
8670 else if (code == LT_EXPR && sgn0 == -1)
8671 code = LE_EXPR;
8672 /* CST > arg1 -> CST-1 >= arg1. */
8673 else if (code == GT_EXPR && sgn0 == 1)
8674 code = GE_EXPR;
8675 /* -CST >= arg1 -> -CST-1 > arg1. */
8676 else if (code == GE_EXPR && sgn0 == -1)
8677 code = GT_EXPR;
8678 else
8679 return NULL_TREE;
8680 /* arg1 code' CST' might be more canonical. */
8681 swap = true;
8683 else
8685 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8686 if (code == LT_EXPR
8687 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8688 code = LE_EXPR;
8689 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8690 else if (code == GT_EXPR
8691 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8692 code = GE_EXPR;
8693 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8694 else if (code == LE_EXPR
8695 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8696 code = LT_EXPR;
8697 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8698 else if (code == GE_EXPR
8699 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8700 code = GT_EXPR;
8701 else
8702 return NULL_TREE;
8703 *strict_overflow_p = true;
8706 /* Now build the constant reduced in magnitude. But not if that
8707 would produce one outside of its type's range. */
8708 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8709 && ((sgn0 == 1
8710 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8711 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8712 || (sgn0 == -1
8713 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8714 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8715 /* We cannot swap the comparison here as that would cause us to
8716 endlessly recurse. */
8717 return NULL_TREE;
8719 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8720 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8721 if (code0 != INTEGER_CST)
8722 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8723 t = fold_convert (TREE_TYPE (arg1), t);
8725 /* If swapping might yield a more canonical form, do so. */
8726 if (swap)
8727 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8728 else
8729 return fold_build2_loc (loc, code, type, t, arg1);
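/* Editorial examples (not part of GCC): assuming signed overflow is
   undefined, x + 2 > 3 is canonicalized to x + 1 >= 3 (the constant in
   ARG0 shrinks in magnitude), and 2 <= y becomes y > 1, with the operands
   swapped into the more canonical constant-last form.  */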
8732 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8733 overflow further. Try to decrease the magnitude of constants involved
8734 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8735 and put sole constants at the second argument position.
8736 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8738 static tree
8739 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8740 tree arg0, tree arg1)
8742 tree t;
8743 bool strict_overflow_p;
8744 const char * const warnmsg = G_("assuming signed overflow does not occur "
8745 "when reducing constant in comparison");
8747 /* Try canonicalization by simplifying arg0. */
8748 strict_overflow_p = false;
8749 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8750 &strict_overflow_p);
8751 if (t)
8753 if (strict_overflow_p)
8754 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8755 return t;
8758 /* Try canonicalization by simplifying arg1 using the swapped
8759 comparison. */
8760 code = swap_tree_comparison (code);
8761 strict_overflow_p = false;
8762 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8763 &strict_overflow_p);
8764 if (t && strict_overflow_p)
8765 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8766 return t;
8769 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8770 space. This is used to avoid issuing overflow warnings for
8771 expressions like &p->x, which cannot wrap. */
8773 static bool
8774 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8776 double_int di_offset, total;
8778 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8779 return true;
8781 if (bitpos < 0)
8782 return true;
8784 if (offset == NULL_TREE)
8785 di_offset = double_int_zero;
8786 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8787 return true;
8788 else
8789 di_offset = TREE_INT_CST (offset);
8791 bool overflow;
8792 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8793 total = di_offset.add_with_sign (units, true, &overflow);
8794 if (overflow)
8795 return true;
8797 if (total.high != 0)
8798 return true;
8800 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8801 if (size <= 0)
8802 return true;
8804 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8805 array. */
8806 if (TREE_CODE (base) == ADDR_EXPR)
8808 HOST_WIDE_INT base_size;
8810 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8811 if (base_size > 0 && size < base_size)
8812 size = base_size;
8815 return total.low > (unsigned HOST_WIDE_INT) size;
8818 /* Subroutine of fold_binary. This routine performs all of the
8819 transformations that are common to the equality/inequality
8820 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8821 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8822 fold_binary should use fold_binary instead. Fold a comparison with
8823 tree code CODE and type TYPE with operands OP0 and OP1. Return
8824 the folded comparison or NULL_TREE. */
8826 static tree
8827 fold_comparison (location_t loc, enum tree_code code, tree type,
8828 tree op0, tree op1)
8830 tree arg0, arg1, tem;
8832 arg0 = op0;
8833 arg1 = op1;
8835 STRIP_SIGN_NOPS (arg0);
8836 STRIP_SIGN_NOPS (arg1);
8838 tem = fold_relational_const (code, type, arg0, arg1);
8839 if (tem != NULL_TREE)
8840 return tem;
8842 /* If one arg is a real or integer constant, put it last. */
8843 if (tree_swap_operands_p (arg0, arg1, true))
8844 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8846 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8847 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8848 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8849 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8850 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8851 && (TREE_CODE (arg1) == INTEGER_CST
8852 && !TREE_OVERFLOW (arg1)))
8854 tree const1 = TREE_OPERAND (arg0, 1);
8855 tree const2 = arg1;
8856 tree variable = TREE_OPERAND (arg0, 0);
8857 tree lhs;
8858 int lhs_add;
8859 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8861 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8862 TREE_TYPE (arg1), const2, const1);
8864 /* If the constant operation overflowed this can be
8865 simplified as a comparison against INT_MAX/INT_MIN. */
8866 if (TREE_CODE (lhs) == INTEGER_CST
8867 && TREE_OVERFLOW (lhs))
8869 int const1_sgn = tree_int_cst_sgn (const1);
8870 enum tree_code code2 = code;
8872 /* Get the sign of the constant on the lhs if the
8873 operation were VARIABLE + CONST1. */
8874 if (TREE_CODE (arg0) == MINUS_EXPR)
8875 const1_sgn = -const1_sgn;
8877 /* The sign of the constant determines if we overflowed
8878 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8879 Canonicalize to the INT_MIN overflow by swapping the comparison
8880 if necessary. */
8881 if (const1_sgn == -1)
8882 code2 = swap_tree_comparison (code);
8884 /* We now can look at the canonicalized case
8885 VARIABLE + 1 CODE2 INT_MIN
8886 and decide on the result. */
8887 if (code2 == LT_EXPR
8888 || code2 == LE_EXPR
8889 || code2 == EQ_EXPR)
8890 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8891 else if (code2 == NE_EXPR
8892 || code2 == GE_EXPR
8893 || code2 == GT_EXPR)
8894 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8897 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8898 && (TREE_CODE (lhs) != INTEGER_CST
8899 || !TREE_OVERFLOW (lhs)))
8901 if (code != EQ_EXPR && code != NE_EXPR)
8902 fold_overflow_warning ("assuming signed overflow does not occur "
8903 "when changing X +- C1 cmp C2 to "
8904 "X cmp C1 +- C2",
8905 WARN_STRICT_OVERFLOW_COMPARISON);
8906 return fold_build2_loc (loc, code, type, variable, lhs);
8910 /* For comparisons of pointers we can decompose it to a compile time
8911 comparison of the base objects and the offsets into the object.
8912 This requires at least one operand being an ADDR_EXPR or a
8913 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8914 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8915 && (TREE_CODE (arg0) == ADDR_EXPR
8916 || TREE_CODE (arg1) == ADDR_EXPR
8917 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8918 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8920 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8921 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8922 enum machine_mode mode;
8923 int volatilep, unsignedp;
8924 bool indirect_base0 = false, indirect_base1 = false;
8926 /* Get base and offset for the access. Strip ADDR_EXPR for
8927 get_inner_reference, but put it back by stripping INDIRECT_REF
8928 off the base object if possible. indirect_baseN will be true
8929 if baseN is not an address but refers to the object itself. */
8930 base0 = arg0;
8931 if (TREE_CODE (arg0) == ADDR_EXPR)
8933 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8934 &bitsize, &bitpos0, &offset0, &mode,
8935 &unsignedp, &volatilep, false);
8936 if (TREE_CODE (base0) == INDIRECT_REF)
8937 base0 = TREE_OPERAND (base0, 0);
8938 else
8939 indirect_base0 = true;
8941 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8943 base0 = TREE_OPERAND (arg0, 0);
8944 STRIP_SIGN_NOPS (base0);
8945 if (TREE_CODE (base0) == ADDR_EXPR)
8947 base0 = TREE_OPERAND (base0, 0);
8948 indirect_base0 = true;
8950 offset0 = TREE_OPERAND (arg0, 1);
8951 if (host_integerp (offset0, 0))
8953 HOST_WIDE_INT off = size_low_cst (offset0);
8954 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8955 * BITS_PER_UNIT)
8956 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8958 bitpos0 = off * BITS_PER_UNIT;
8959 offset0 = NULL_TREE;
8964 base1 = arg1;
8965 if (TREE_CODE (arg1) == ADDR_EXPR)
8967 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8968 &bitsize, &bitpos1, &offset1, &mode,
8969 &unsignedp, &volatilep, false);
8970 if (TREE_CODE (base1) == INDIRECT_REF)
8971 base1 = TREE_OPERAND (base1, 0);
8972 else
8973 indirect_base1 = true;
8975 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8977 base1 = TREE_OPERAND (arg1, 0);
8978 STRIP_SIGN_NOPS (base1);
8979 if (TREE_CODE (base1) == ADDR_EXPR)
8981 base1 = TREE_OPERAND (base1, 0);
8982 indirect_base1 = true;
8984 offset1 = TREE_OPERAND (arg1, 1);
8985 if (host_integerp (offset1, 0))
8987 HOST_WIDE_INT off = size_low_cst (offset1);
8988 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8989 * BITS_PER_UNIT)
8990 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8992 bitpos1 = off * BITS_PER_UNIT;
8993 offset1 = NULL_TREE;
8998 /* A local variable can never be pointed to by
8999 the default SSA name of an incoming parameter. */
9000 if ((TREE_CODE (arg0) == ADDR_EXPR
9001 && indirect_base0
9002 && TREE_CODE (base0) == VAR_DECL
9003 && auto_var_in_fn_p (base0, current_function_decl)
9004 && !indirect_base1
9005 && TREE_CODE (base1) == SSA_NAME
9006 && SSA_NAME_IS_DEFAULT_DEF (base1)
9007 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9008 || (TREE_CODE (arg1) == ADDR_EXPR
9009 && indirect_base1
9010 && TREE_CODE (base1) == VAR_DECL
9011 && auto_var_in_fn_p (base1, current_function_decl)
9012 && !indirect_base0
9013 && TREE_CODE (base0) == SSA_NAME
9014 && SSA_NAME_IS_DEFAULT_DEF (base0)
9015 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9017 if (code == NE_EXPR)
9018 return constant_boolean_node (1, type);
9019 else if (code == EQ_EXPR)
9020 return constant_boolean_node (0, type);
9022 /* If we have equivalent bases we might be able to simplify. */
9023 else if (indirect_base0 == indirect_base1
9024 && operand_equal_p (base0, base1, 0))
9026 /* We can fold this expression to a constant if the non-constant
9027 offset parts are equal. */
9028 if ((offset0 == offset1
9029 || (offset0 && offset1
9030 && operand_equal_p (offset0, offset1, 0)))
9031 && (code == EQ_EXPR
9032 || code == NE_EXPR
9033 || (indirect_base0 && DECL_P (base0))
9034 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9037 if (code != EQ_EXPR
9038 && code != NE_EXPR
9039 && bitpos0 != bitpos1
9040 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9041 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9042 fold_overflow_warning (("assuming pointer wraparound does not "
9043 "occur when comparing P +- C1 with "
9044 "P +- C2"),
9045 WARN_STRICT_OVERFLOW_CONDITIONAL);
9047 switch (code)
9049 case EQ_EXPR:
9050 return constant_boolean_node (bitpos0 == bitpos1, type);
9051 case NE_EXPR:
9052 return constant_boolean_node (bitpos0 != bitpos1, type);
9053 case LT_EXPR:
9054 return constant_boolean_node (bitpos0 < bitpos1, type);
9055 case LE_EXPR:
9056 return constant_boolean_node (bitpos0 <= bitpos1, type);
9057 case GE_EXPR:
9058 return constant_boolean_node (bitpos0 >= bitpos1, type);
9059 case GT_EXPR:
9060 return constant_boolean_node (bitpos0 > bitpos1, type);
9061 default:;
9064 /* We can simplify the comparison to a comparison of the variable
9065 offset parts if the constant offset parts are equal.
9066 Be careful to use signed sizetype here because otherwise we
9067 mess with array offsets in the wrong way. This is possible
9068 because pointer arithmetic is restricted to remain within an
9069 object and overflow on pointer differences is undefined as of
9070 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9071 else if (bitpos0 == bitpos1
9072 && ((code == EQ_EXPR || code == NE_EXPR)
9073 || (indirect_base0 && DECL_P (base0))
9074 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9076 /* By converting to signed sizetype we cover middle-end pointer
9077 arithmetic which operates on unsigned pointer types of size
9078 type size and ARRAY_REF offsets which are properly sign or
9079 zero extended from their type in case it is narrower than
9080 sizetype. */
9081 if (offset0 == NULL_TREE)
9082 offset0 = build_int_cst (ssizetype, 0);
9083 else
9084 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9085 if (offset1 == NULL_TREE)
9086 offset1 = build_int_cst (ssizetype, 0);
9087 else
9088 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9090 if (code != EQ_EXPR
9091 && code != NE_EXPR
9092 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9093 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9094 fold_overflow_warning (("assuming pointer wraparound does not "
9095 "occur when comparing P +- C1 with "
9096 "P +- C2"),
9097 WARN_STRICT_OVERFLOW_COMPARISON);
9099 return fold_build2_loc (loc, code, type, offset0, offset1);
9102 /* For non-equal bases we can simplify if they are addresses
9103 of local binding decls or constants. */
9104 else if (indirect_base0 && indirect_base1
9105 /* We know that !operand_equal_p (base0, base1, 0)
9106 because the if condition was false. But make
9107 sure two decls are not the same. */
9108 && base0 != base1
9109 && TREE_CODE (arg0) == ADDR_EXPR
9110 && TREE_CODE (arg1) == ADDR_EXPR
9111 && (((TREE_CODE (base0) == VAR_DECL
9112 || TREE_CODE (base0) == PARM_DECL)
9113 && (targetm.binds_local_p (base0)
9114 || CONSTANT_CLASS_P (base1)))
9115 || CONSTANT_CLASS_P (base0))
9116 && (((TREE_CODE (base1) == VAR_DECL
9117 || TREE_CODE (base1) == PARM_DECL)
9118 && (targetm.binds_local_p (base1)
9119 || CONSTANT_CLASS_P (base0)))
9120 || CONSTANT_CLASS_P (base1)))
9122 if (code == EQ_EXPR)
9123 return omit_two_operands_loc (loc, type, boolean_false_node,
9124 arg0, arg1);
9125 else if (code == NE_EXPR)
9126 return omit_two_operands_loc (loc, type, boolean_true_node,
9127 arg0, arg1);
9129 /* For equal offsets we can simplify to a comparison of the
9130 base addresses. */
9131 else if (bitpos0 == bitpos1
9132 && (indirect_base0
9133 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9134 && (indirect_base1
9135 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9136 && ((offset0 == offset1)
9137 || (offset0 && offset1
9138 && operand_equal_p (offset0, offset1, 0))))
9140 if (indirect_base0)
9141 base0 = build_fold_addr_expr_loc (loc, base0);
9142 if (indirect_base1)
9143 base1 = build_fold_addr_expr_loc (loc, base1);
9144 return fold_build2_loc (loc, code, type, base0, base1);
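/* Editor's sketch (hypothetical, not part of GCC): with a common base the
   comparison folds to a constant from the byte offsets, and a local's
   address can never equal a parameter's incoming pointer:  */
#if 0
static int lt (void) { int a[4]; return &a[1] < &a[3]; }  /* folds to 1 */
static int ne (int *p) { int x; return &x != p; }         /* folds to 1 once p
                                                             is a default-def
                                                             SSA name */
#endif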
9148 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9149 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9150 the resulting offset is smaller in absolute value than the
9151 original one. */
9152 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9153 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9154 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9155 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9156 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9157 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9158 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9160 tree const1 = TREE_OPERAND (arg0, 1);
9161 tree const2 = TREE_OPERAND (arg1, 1);
9162 tree variable1 = TREE_OPERAND (arg0, 0);
9163 tree variable2 = TREE_OPERAND (arg1, 0);
9164 tree cst;
9165 const char * const warnmsg = G_("assuming signed overflow does not "
9166 "occur when combining constants around "
9167 "a comparison");
9169 /* Put the constant on the side where it doesn't overflow and is
9170 of lower absolute value than before. */
9171 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9172 ? MINUS_EXPR : PLUS_EXPR,
9173 const2, const1);
9174 if (!TREE_OVERFLOW (cst)
9175 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9177 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9178 return fold_build2_loc (loc, code, type,
9179 variable1,
9180 fold_build2_loc (loc,
9181 TREE_CODE (arg1), TREE_TYPE (arg1),
9182 variable2, cst));
9185 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9186 ? MINUS_EXPR : PLUS_EXPR,
9187 const1, const2);
9188 if (!TREE_OVERFLOW (cst)
9189 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9191 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9192 return fold_build2_loc (loc, code, type,
9193 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9194 variable1, cst),
9195 variable2);
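/* Editor's sketch (hypothetical, not part of GCC): for signed x and y the
   constants are combined on the side where the result is smaller in
   absolute value, e.g.:  */
#if 0
static int cmp (int x, int y) { return x + 100 < y + 103; }  /* -> x < y + 3 */
#endif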
9199 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9200 signed arithmetic case. That form is created by the compiler
9201 often enough for folding it to be of value. One example is in
9202 computing loop trip counts after Operator Strength Reduction. */
9203 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9204 && TREE_CODE (arg0) == MULT_EXPR
9205 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9206 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9207 && integer_zerop (arg1))
9209 tree const1 = TREE_OPERAND (arg0, 1);
9210 tree const2 = arg1; /* zero */
9211 tree variable1 = TREE_OPERAND (arg0, 0);
9212 enum tree_code cmp_code = code;
9214 /* Handle unfolded multiplication by zero. */
9215 if (integer_zerop (const1))
9216 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9218 fold_overflow_warning (("assuming signed overflow does not occur when "
9219 "eliminating multiplication in comparison "
9220 "with zero"),
9221 WARN_STRICT_OVERFLOW_COMPARISON);
9223 /* If const1 is negative we swap the sense of the comparison. */
9224 if (tree_int_cst_sgn (const1) < 0)
9225 cmp_code = swap_tree_comparison (cmp_code);
9227 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
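/* Editor's sketch (hypothetical, not part of GCC): for signed x with
   undefined overflow the multiplication is dropped, with the sense of the
   comparison swapped for a negative multiplier:  */
#if 0
static int pos (int x) { return x * 4 > 0; }   /* -> x > 0 */
static int neg (int x) { return x * -2 > 0; }  /* -> x < 0 */
#endif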
9230 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9231 if (tem)
9232 return tem;
9234 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9236 tree targ0 = strip_float_extensions (arg0);
9237 tree targ1 = strip_float_extensions (arg1);
9238 tree newtype = TREE_TYPE (targ0);
9240 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9241 newtype = TREE_TYPE (targ1);
9243 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9244 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9245 return fold_build2_loc (loc, code, type,
9246 fold_convert_loc (loc, newtype, targ0),
9247 fold_convert_loc (loc, newtype, targ1));
9249 /* (-a) CMP (-b) -> b CMP a */
9250 if (TREE_CODE (arg0) == NEGATE_EXPR
9251 && TREE_CODE (arg1) == NEGATE_EXPR)
9252 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9253 TREE_OPERAND (arg0, 0));
9255 if (TREE_CODE (arg1) == REAL_CST)
9257 REAL_VALUE_TYPE cst;
9258 cst = TREE_REAL_CST (arg1);
9260 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9261 if (TREE_CODE (arg0) == NEGATE_EXPR)
9262 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9263 TREE_OPERAND (arg0, 0),
9264 build_real (TREE_TYPE (arg1),
9265 real_value_negate (&cst)));
9267 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9268 /* a CMP (-0) -> a CMP 0 */
9269 if (REAL_VALUE_MINUS_ZERO (cst))
9270 return fold_build2_loc (loc, code, type, arg0,
9271 build_real (TREE_TYPE (arg1), dconst0));
9273 /* x != NaN is always true, other ops are always false. */
9274 if (REAL_VALUE_ISNAN (cst)
9275 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9277 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9278 return omit_one_operand_loc (loc, type, tem, arg0);
9281 /* Fold comparisons against infinity. */
9282 if (REAL_VALUE_ISINF (cst)
9283 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9285 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9286 if (tem != NULL_TREE)
9287 return tem;
9291 /* If this is a comparison of a real constant with a PLUS_EXPR
9292 or a MINUS_EXPR of a real constant, we can convert it into a
9293 comparison with a revised real constant, provided that unsafe
9294 math optimizations are enabled and no overflow occurs. */
9295 if (flag_unsafe_math_optimizations
9296 && TREE_CODE (arg1) == REAL_CST
9297 && (TREE_CODE (arg0) == PLUS_EXPR
9298 || TREE_CODE (arg0) == MINUS_EXPR)
9299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9300 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9301 ? MINUS_EXPR : PLUS_EXPR,
9302 arg1, TREE_OPERAND (arg0, 1)))
9303 && !TREE_OVERFLOW (tem))
9304 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9306 /* Likewise, we can simplify a comparison of a real constant with
9307 a MINUS_EXPR whose first operand is also a real constant, i.e.
9308 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9309 floating-point types only if -fassociative-math is set. */
9310 if (flag_associative_math
9311 && TREE_CODE (arg1) == REAL_CST
9312 && TREE_CODE (arg0) == MINUS_EXPR
9313 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9314 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9315 arg1))
9316 && !TREE_OVERFLOW (tem))
9317 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9318 TREE_OPERAND (arg0, 1), tem);
9320 /* Fold comparisons against built-in math functions. */
9321 if (TREE_CODE (arg1) == REAL_CST
9322 && flag_unsafe_math_optimizations
9323 && ! flag_errno_math)
9325 enum built_in_function fcode = builtin_mathfn_code (arg0);
9327 if (fcode != END_BUILTINS)
9329 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9330 if (tem != NULL_TREE)
9331 return tem;
9336 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9337 && CONVERT_EXPR_P (arg0))
9339 /* If we are widening one operand of an integer comparison,
9340 see if the other operand is similarly being widened. Perhaps we
9341 can do the comparison in the narrower type. */
9342 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9343 if (tem)
9344 return tem;
9346 /* Or if we are changing signedness. */
9347 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9348 if (tem)
9349 return tem;
9352 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9353 constant, we can simplify it. */
9354 if (TREE_CODE (arg1) == INTEGER_CST
9355 && (TREE_CODE (arg0) == MIN_EXPR
9356 || TREE_CODE (arg0) == MAX_EXPR)
9357 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9359 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9360 if (tem)
9361 return tem;
9364 /* Simplify comparison of something with itself. (For IEEE
9365 floating-point, we can only do some of these simplifications.) */
9366 if (operand_equal_p (arg0, arg1, 0))
9368 switch (code)
9370 case EQ_EXPR:
9371 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9372 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9373 return constant_boolean_node (1, type);
9374 break;
9376 case GE_EXPR:
9377 case LE_EXPR:
9378 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9379 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9380 return constant_boolean_node (1, type);
9381 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9383 case NE_EXPR:
9384 /* For NE, we can only do this simplification if the operands
9385 are integral or we don't honor IEEE floating-point NaNs. */
9386 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9387 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9388 break;
9389 /* ... fall through ... */
9390 case GT_EXPR:
9391 case LT_EXPR:
9392 return constant_boolean_node (0, type);
9393 default:
9394 gcc_unreachable ();
9398 /* If we are comparing an expression that just has comparisons
9399 of two integer values, arithmetic expressions of those comparisons,
9400 and constants, we can simplify it. There are only three cases
9401 to check: the two values can either be equal, the first can be
9402 greater, or the second can be greater. Fold the expression for
9403 those three values. Since each value must be 0 or 1, we have
9404 eight possibilities, each of which corresponds to the constant 0
9405 or 1 or one of the six possible comparisons.
9407 This handles common cases like (a > b) == 0 but also handles
9408 expressions like ((x > y) - (y > x)) > 0, which supposedly
9409 occur in macroized code. */
9411 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9413 tree cval1 = 0, cval2 = 0;
9414 int save_p = 0;
9416 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9417 /* Don't handle degenerate cases here; they should already
9418 have been handled anyway. */
9419 && cval1 != 0 && cval2 != 0
9420 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9421 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9422 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9423 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9424 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9425 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9426 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9428 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9429 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9431 /* We can't just pass T to eval_subst in case cval1 or cval2
9432 was the same as ARG1. */
9434 tree high_result
9435 = fold_build2_loc (loc, code, type,
9436 eval_subst (loc, arg0, cval1, maxval,
9437 cval2, minval),
9438 arg1);
9439 tree equal_result
9440 = fold_build2_loc (loc, code, type,
9441 eval_subst (loc, arg0, cval1, maxval,
9442 cval2, maxval),
9443 arg1);
9444 tree low_result
9445 = fold_build2_loc (loc, code, type,
9446 eval_subst (loc, arg0, cval1, minval,
9447 cval2, maxval),
9448 arg1);
9450 /* All three of these results should be 0 or 1. Confirm they are.
9451 Then use those values to select the proper code to use. */
9453 if (TREE_CODE (high_result) == INTEGER_CST
9454 && TREE_CODE (equal_result) == INTEGER_CST
9455 && TREE_CODE (low_result) == INTEGER_CST)
9457 /* Make a 3-bit mask with the high-order bit being the
9458 value for `>', the next for `=', and the low for `<'. */
9459 switch ((integer_onep (high_result) * 4)
9460 + (integer_onep (equal_result) * 2)
9461 + integer_onep (low_result))
9463 case 0:
9464 /* Always false. */
9465 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9466 case 1:
9467 code = LT_EXPR;
9468 break;
9469 case 2:
9470 code = EQ_EXPR;
9471 break;
9472 case 3:
9473 code = LE_EXPR;
9474 break;
9475 case 4:
9476 code = GT_EXPR;
9477 break;
9478 case 5:
9479 code = NE_EXPR;
9480 break;
9481 case 6:
9482 code = GE_EXPR;
9483 break;
9484 case 7:
9485 /* Always true. */
9486 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9489 if (save_p)
9491 tem = save_expr (build2 (code, type, cval1, cval2));
9492 SET_EXPR_LOCATION (tem, loc);
9493 return tem;
9495 return fold_build2_loc (loc, code, type, cval1, cval2);
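/* Editor's sketch (hypothetical, not part of GCC): the three-value
   evaluation above turns sign-style comparison idioms back into a single
   comparison:  */
#if 0
static int gt (int x, int y) { return ((x > y) - (y > x)) > 0; }  /* -> x > y */
#endif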
9500 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9501 into a single range test. */
9502 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9503 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9504 && TREE_CODE (arg1) == INTEGER_CST
9505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9506 && !integer_zerop (TREE_OPERAND (arg0, 1))
9507 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9508 && !TREE_OVERFLOW (arg1))
9510 tem = fold_div_compare (loc, code, type, arg0, arg1);
9511 if (tem != NULL_TREE)
9512 return tem;
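/* Editor's sketch (hypothetical, not part of GCC): a division compared
   against a constant becomes a range test on the dividend:  */
#if 0
static int rng (unsigned x) { return x / 4 == 2; }  /* -> 8 <= x && x <= 11 */
#endif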
9515 /* Fold ~X op ~Y as Y op X. */
9516 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9517 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9519 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9520 return fold_build2_loc (loc, code, type,
9521 fold_convert_loc (loc, cmp_type,
9522 TREE_OPERAND (arg1, 0)),
9523 TREE_OPERAND (arg0, 0));
9526 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9527 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9528 && TREE_CODE (arg1) == INTEGER_CST)
9530 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9531 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9532 TREE_OPERAND (arg0, 0),
9533 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9534 fold_convert_loc (loc, cmp_type, arg1)));
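/* Editor's sketch (hypothetical, not part of GCC): how the two BIT_NOT
   folds above read in source form:  */
#if 0
static int both (int x, int y) { return ~x == ~y; }  /* -> y == x */
static int one (int x) { return ~x < 5; }            /* -> x > ~5, i.e. x > -6 */
#endif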
9537 return NULL_TREE;
9541 /* Subroutine of fold_binary. Optimize complex multiplications of the
9542 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9543 argument EXPR represents the expression "z" of type TYPE. */
9545 static tree
9546 fold_mult_zconjz (location_t loc, tree type, tree expr)
9548 tree itype = TREE_TYPE (type);
9549 tree rpart, ipart, tem;
9551 if (TREE_CODE (expr) == COMPLEX_EXPR)
9553 rpart = TREE_OPERAND (expr, 0);
9554 ipart = TREE_OPERAND (expr, 1);
9556 else if (TREE_CODE (expr) == COMPLEX_CST)
9558 rpart = TREE_REALPART (expr);
9559 ipart = TREE_IMAGPART (expr);
9561 else
9563 expr = save_expr (expr);
9564 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9565 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9568 rpart = save_expr (rpart);
9569 ipart = save_expr (ipart);
9570 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9571 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9572 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9573 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9574 build_zero_cst (itype));
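/* Editor's sketch (hypothetical, not part of GCC): for z = a + b*i the
   fold above implements the identity z * conj(z) == (a*a + b*b) + 0*i:  */
#if 0
#include <complex.h>
static double _Complex zz (double _Complex z) { return z * conj (z); }
#endif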
9578 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9579 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9580 guarantees that P and N have the same least significant log2(M) bits.
9581 N is not otherwise constrained. In particular, N is not normalized to
9582 0 <= N < M as is common. In general, the precise value of P is unknown.
9583 M is chosen as large as possible such that constant N can be determined.
9585 Returns M and sets *RESIDUE to N.
9587 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9588 account. This is not always possible due to PR 35705.
9591 static unsigned HOST_WIDE_INT
9592 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9593 bool allow_func_align)
9595 enum tree_code code;
9597 *residue = 0;
9599 code = TREE_CODE (expr);
9600 if (code == ADDR_EXPR)
9602 unsigned int bitalign;
9603 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9604 *residue /= BITS_PER_UNIT;
9605 return bitalign / BITS_PER_UNIT;
9607 else if (code == POINTER_PLUS_EXPR)
9609 tree op0, op1;
9610 unsigned HOST_WIDE_INT modulus;
9611 enum tree_code inner_code;
9613 op0 = TREE_OPERAND (expr, 0);
9614 STRIP_NOPS (op0);
9615 modulus = get_pointer_modulus_and_residue (op0, residue,
9616 allow_func_align);
9618 op1 = TREE_OPERAND (expr, 1);
9619 STRIP_NOPS (op1);
9620 inner_code = TREE_CODE (op1);
9621 if (inner_code == INTEGER_CST)
9623 *residue += TREE_INT_CST_LOW (op1);
9624 return modulus;
9626 else if (inner_code == MULT_EXPR)
9628 op1 = TREE_OPERAND (op1, 1);
9629 if (TREE_CODE (op1) == INTEGER_CST)
9631 unsigned HOST_WIDE_INT align;
9633 /* Compute the greatest power-of-2 divisor of op1. */
9634 align = TREE_INT_CST_LOW (op1);
9635 align &= -align;
9637 /* If align is non-zero and less than modulus, replace
9638 modulus with align. If align is 0, then either op1 is 0
9639 or the greatest power-of-2 divisor of op1 doesn't fit in an
9640 unsigned HOST_WIDE_INT. In either case, no additional
9641 constraint is imposed. */
9642 if (align)
9643 modulus = MIN (modulus, align);
9645 return modulus;
9650 /* If we get here, we were unable to determine anything useful about the
9651 expression. */
9652 return 1;
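/* Editor's sketch (hypothetical values, not part of GCC): for the address
   below, assuming buf gets 16-byte alignment, the function above would
   return a modulus of 16 with *RESIDUE == 5, i.e. the low log2(16) = 4
   bits of the pointer value are known:  */
#if 0
static char buf[32] __attribute__ ((aligned (16)));
static char *p = &buf[5];   /* modulus 16, residue 5 */
#endif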
9655 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9656 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9658 static bool
9659 vec_cst_ctor_to_array (tree arg, tree *elts)
9661 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9663 if (TREE_CODE (arg) == VECTOR_CST)
9665 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9666 elts[i] = VECTOR_CST_ELT (arg, i);
9668 else if (TREE_CODE (arg) == CONSTRUCTOR)
9670 constructor_elt *elt;
9672 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9673 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9674 return false;
9675 else
9676 elts[i] = elt->value;
9678 else
9679 return false;
9680 for (; i < nelts; i++)
9681 elts[i]
9682 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9683 return true;
9686 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9687 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9688 NULL_TREE otherwise. */
9690 static tree
9691 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9693 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9694 tree *elts;
9695 bool need_ctor = false;
9697 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9698 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9699 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9700 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9701 return NULL_TREE;
9703 elts = XALLOCAVEC (tree, nelts * 3);
9704 if (!vec_cst_ctor_to_array (arg0, elts)
9705 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9706 return NULL_TREE;
9708 for (i = 0; i < nelts; i++)
9710 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9711 need_ctor = true;
9712 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9715 if (need_ctor)
9717 vec<constructor_elt, va_gc> *v;
9718 vec_alloc (v, nelts);
9719 for (i = 0; i < nelts; i++)
9720 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9721 return build_constructor (type, v);
9723 else
9724 return build_vector (type, &elts[2 * nelts]);
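/* Editor's sketch (hypothetical, not part of GCC): with nelts == 4 and
   constant inputs, selector indices below nelts pick from ARG0 and the
   rest from ARG1, so the shuffle below folds to a constant vector:  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
static v4si interleave (void)
{
  v4si a = { 0, 1, 2, 3 }, b = { 4, 5, 6, 7 };
  v4si sel = { 0, 4, 1, 5 };
  return __builtin_shuffle (a, b, sel);   /* folds to { 0, 4, 1, 5 } */
}
#endif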
9727 /* Try to fold a pointer difference of type TYPE between two address
9728 expressions of array references AREF0 and AREF1 using location LOC. Return a
9729 simplified expression for the difference or NULL_TREE. */
9731 static tree
9732 fold_addr_of_array_ref_difference (location_t loc, tree type,
9733 tree aref0, tree aref1)
9735 tree base0 = TREE_OPERAND (aref0, 0);
9736 tree base1 = TREE_OPERAND (aref1, 0);
9737 tree base_offset = build_int_cst (type, 0);
9739 /* If the bases are array references as well, recurse. If the bases
9740 are pointer indirections compute the difference of the pointers.
9741 If the bases are equal, we are set. */
9742 if ((TREE_CODE (base0) == ARRAY_REF
9743 && TREE_CODE (base1) == ARRAY_REF
9744 && (base_offset
9745 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9746 || (INDIRECT_REF_P (base0)
9747 && INDIRECT_REF_P (base1)
9748 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9749 TREE_OPERAND (base0, 0),
9750 TREE_OPERAND (base1, 0))))
9751 || operand_equal_p (base0, base1, 0))
9753 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9754 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9755 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9756 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9757 return fold_build2_loc (loc, PLUS_EXPR, type,
9758 base_offset,
9759 fold_build2_loc (loc, MULT_EXPR, type,
9760 diff, esz));
9762 return NULL_TREE;
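/* Editor's sketch (hypothetical, not part of GCC): the recursion above
   lets a difference of element addresses fold to a difference of indices,
   since the element-size factor cancels in the pointer subtraction:  */
#if 0
static long diff (long i, long j) { static int a[16]; return &a[i] - &a[j]; }
/* folds to i - j */
#endif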
9765 /* If the real or vector real constant CST of type TYPE has an exact
9766 inverse, return it, else return NULL. */
9768 static tree
9769 exact_inverse (tree type, tree cst)
9771 REAL_VALUE_TYPE r;
9772 tree unit_type, *elts;
9773 enum machine_mode mode;
9774 unsigned vec_nelts, i;
9776 switch (TREE_CODE (cst))
9778 case REAL_CST:
9779 r = TREE_REAL_CST (cst);
9781 if (exact_real_inverse (TYPE_MODE (type), &r))
9782 return build_real (type, r);
9784 return NULL_TREE;
9786 case VECTOR_CST:
9787 vec_nelts = VECTOR_CST_NELTS (cst);
9788 elts = XALLOCAVEC (tree, vec_nelts);
9789 unit_type = TREE_TYPE (type);
9790 mode = TYPE_MODE (unit_type);
9792 for (i = 0; i < vec_nelts; i++)
9794 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9795 if (!exact_real_inverse (mode, &r))
9796 return NULL_TREE;
9797 elts[i] = build_real (unit_type, r);
9800 return build_vector (type, elts);
9802 default:
9803 return NULL_TREE;
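/* Editor's sketch (hypothetical, not part of GCC): division by a constant
   with an exact reciprocal can become a multiplication, while an inexact
   reciprocal keeps the division:  */
#if 0
static double exact (double x) { return x / 4.0; }    /* may become x * 0.25 */
static double inexact (double x) { return x / 3.0; }  /* 1/3 is inexact */
#endif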
9807 /* Fold a binary expression of code CODE and type TYPE with operands
9808 OP0 and OP1. LOC is the location of the resulting expression.
9809 Return the folded expression if folding is successful. Otherwise,
9810 return NULL_TREE. */
9812 tree
9813 fold_binary_loc (location_t loc,
9814 enum tree_code code, tree type, tree op0, tree op1)
9816 enum tree_code_class kind = TREE_CODE_CLASS (code);
9817 tree arg0, arg1, tem;
9818 tree t1 = NULL_TREE;
9819 bool strict_overflow_p;
9821 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9822 && TREE_CODE_LENGTH (code) == 2
9823 && op0 != NULL_TREE
9824 && op1 != NULL_TREE);
9826 arg0 = op0;
9827 arg1 = op1;
9829 /* Strip any conversions that don't change the mode. This is
9830 safe for every expression, except for a comparison expression
9831 because its signedness is derived from its operands. So, in
9832 the latter case, only strip conversions that don't change the
9833 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9834 preserved.
9836 Note that this is done as an internal manipulation within the
9837 constant folder, in order to find the simplest representation
9838 of the arguments so that their form can be studied. In any
9839 case, the appropriate type conversions should be put back in
9840 the tree that will get out of the constant folder. */
9842 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9844 STRIP_SIGN_NOPS (arg0);
9845 STRIP_SIGN_NOPS (arg1);
9847 else
9849 STRIP_NOPS (arg0);
9850 STRIP_NOPS (arg1);
9853 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9854 constant but we can't do arithmetic on them. */
9855 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9856 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9857 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9858 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9859 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9860 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9862 if (kind == tcc_binary)
9864 /* Make sure type and arg0 have the same saturating flag. */
9865 gcc_assert (TYPE_SATURATING (type)
9866 == TYPE_SATURATING (TREE_TYPE (arg0)));
9867 tem = const_binop (code, arg0, arg1);
9869 else if (kind == tcc_comparison)
9870 tem = fold_relational_const (code, type, arg0, arg1);
9871 else
9872 tem = NULL_TREE;
9874 if (tem != NULL_TREE)
9876 if (TREE_TYPE (tem) != type)
9877 tem = fold_convert_loc (loc, type, tem);
9878 return tem;
9882 /* If this is a commutative operation, and ARG0 is a constant, move it
9883 to ARG1 to reduce the number of tests below. */
9884 if (commutative_tree_code (code)
9885 && tree_swap_operands_p (arg0, arg1, true))
9886 return fold_build2_loc (loc, code, type, op1, op0);
9888 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9890 First check for cases where an arithmetic operation is applied to a
9891 compound, conditional, or comparison operation. Push the arithmetic
9892 operation inside the compound or conditional to see if any folding
9893 can then be done. Convert comparison to conditional for this purpose.
9894 This also optimizes non-constant cases that used to be done in
9895 expand_expr.
9897 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9898 one of the operands is a comparison and the other is a comparison, a
9899 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9900 code below would make the expression more complex. Change it to a
9901 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9902 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9904 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9905 || code == EQ_EXPR || code == NE_EXPR)
9906 && TREE_CODE (type) != VECTOR_TYPE
9907 && ((truth_value_p (TREE_CODE (arg0))
9908 && (truth_value_p (TREE_CODE (arg1))
9909 || (TREE_CODE (arg1) == BIT_AND_EXPR
9910 && integer_onep (TREE_OPERAND (arg1, 1)))))
9911 || (truth_value_p (TREE_CODE (arg1))
9912 && (truth_value_p (TREE_CODE (arg0))
9913 || (TREE_CODE (arg0) == BIT_AND_EXPR
9914 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9916 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9917 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9918 : TRUTH_XOR_EXPR,
9919 boolean_type_node,
9920 fold_convert_loc (loc, boolean_type_node, arg0),
9921 fold_convert_loc (loc, boolean_type_node, arg1));
9923 if (code == EQ_EXPR)
9924 tem = invert_truthvalue_loc (loc, tem);
9926 return fold_convert_loc (loc, type, tem);
9929 if (TREE_CODE_CLASS (code) == tcc_binary
9930 || TREE_CODE_CLASS (code) == tcc_comparison)
9932 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9934 tem = fold_build2_loc (loc, code, type,
9935 fold_convert_loc (loc, TREE_TYPE (op0),
9936 TREE_OPERAND (arg0, 1)), op1);
9937 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9938 tem);
9940 if (TREE_CODE (arg1) == COMPOUND_EXPR
9941 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9943 tem = fold_build2_loc (loc, code, type, op0,
9944 fold_convert_loc (loc, TREE_TYPE (op1),
9945 TREE_OPERAND (arg1, 1)));
9946 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9947 tem);
9950 if (TREE_CODE (arg0) == COND_EXPR
9951 || TREE_CODE (arg0) == VEC_COND_EXPR
9952 || COMPARISON_CLASS_P (arg0))
9954 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9955 arg0, arg1,
9956 /*cond_first_p=*/1);
9957 if (tem != NULL_TREE)
9958 return tem;
9961 if (TREE_CODE (arg1) == COND_EXPR
9962 || TREE_CODE (arg1) == VEC_COND_EXPR
9963 || COMPARISON_CLASS_P (arg1))
9965 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9966 arg1, arg0,
9967 /*cond_first_p=*/0);
9968 if (tem != NULL_TREE)
9969 return tem;
9973 switch (code)
9975 case MEM_REF:
9976 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9977 if (TREE_CODE (arg0) == ADDR_EXPR
9978 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9980 tree iref = TREE_OPERAND (arg0, 0);
9981 return fold_build2 (MEM_REF, type,
9982 TREE_OPERAND (iref, 0),
9983 int_const_binop (PLUS_EXPR, arg1,
9984 TREE_OPERAND (iref, 1)));
9987 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9988 if (TREE_CODE (arg0) == ADDR_EXPR
9989 && handled_component_p (TREE_OPERAND (arg0, 0)))
9991 tree base;
9992 HOST_WIDE_INT coffset;
9993 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9994 &coffset);
9995 if (!base)
9996 return NULL_TREE;
9997 return fold_build2 (MEM_REF, type,
9998 build_fold_addr_expr (base),
9999 int_const_binop (PLUS_EXPR, arg1,
10000 size_int (coffset)));
10003 return NULL_TREE;
10005 case POINTER_PLUS_EXPR:
10006 /* 0 +p index -> (type)index */
10007 if (integer_zerop (arg0))
10008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10010 /* PTR +p 0 -> PTR */
10011 if (integer_zerop (arg1))
10012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10014 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10015 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10016 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10017 return fold_convert_loc (loc, type,
10018 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10019 fold_convert_loc (loc, sizetype,
10020 arg1),
10021 fold_convert_loc (loc, sizetype,
10022 arg0)));
10024 /* (PTR +p B) +p A -> PTR +p (B + A) */
10025 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10027 tree inner;
10028 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10029 tree arg00 = TREE_OPERAND (arg0, 0);
10030 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10031 arg01, fold_convert_loc (loc, sizetype, arg1));
10032 return fold_convert_loc (loc, type,
10033 fold_build_pointer_plus_loc (loc,
10034 arg00, inner));
10037 /* PTR_CST +p CST -> CST1 */
10038 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10039 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10040 fold_convert_loc (loc, type, arg1));
10042 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10043 of the array. The loop optimizer sometimes produces this type of
10044 expression. */
10045 if (TREE_CODE (arg0) == ADDR_EXPR)
10047 tem = try_move_mult_to_index (loc, arg0,
10048 fold_convert_loc (loc,
10049 ssizetype, arg1));
10050 if (tem)
10051 return fold_convert_loc (loc, type, tem);
10054 return NULL_TREE;
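/* Editor's sketch (hypothetical, not part of GCC): source-level shapes of
   the POINTER_PLUS_EXPR folds above:  */
#if 0
static int *id (int *p) { return (int *) ((char *) p + 0); }        /* -> p */
static int *sum (int *p) { return (int *) (((char *) p + 4) + 8); } /* -> p p+ 12 */
#endif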
10056 case PLUS_EXPR:
10057 /* A + (-B) -> A - B */
10058 if (TREE_CODE (arg1) == NEGATE_EXPR)
10059 return fold_build2_loc (loc, MINUS_EXPR, type,
10060 fold_convert_loc (loc, type, arg0),
10061 fold_convert_loc (loc, type,
10062 TREE_OPERAND (arg1, 0)));
10063 /* (-A) + B -> B - A */
10064 if (TREE_CODE (arg0) == NEGATE_EXPR
10065 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10066 return fold_build2_loc (loc, MINUS_EXPR, type,
10067 fold_convert_loc (loc, type, arg1),
10068 fold_convert_loc (loc, type,
10069 TREE_OPERAND (arg0, 0)));
10071 if (INTEGRAL_TYPE_P (type))
10073 /* Convert ~A + 1 to -A. */
10074 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10075 && integer_onep (arg1))
10076 return fold_build1_loc (loc, NEGATE_EXPR, type,
10077 fold_convert_loc (loc, type,
10078 TREE_OPERAND (arg0, 0)));
10080 /* ~X + X is -1. */
10081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10082 && !TYPE_OVERFLOW_TRAPS (type))
10084 tree tem = TREE_OPERAND (arg0, 0);
10086 STRIP_NOPS (tem);
10087 if (operand_equal_p (tem, arg1, 0))
10089 t1 = build_int_cst_type (type, -1);
10090 return omit_one_operand_loc (loc, type, t1, arg1);
10094 /* X + ~X is -1. */
10095 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10096 && !TYPE_OVERFLOW_TRAPS (type))
10098 tree tem = TREE_OPERAND (arg1, 0);
10100 STRIP_NOPS (tem);
10101 if (operand_equal_p (arg0, tem, 0))
10103 t1 = build_int_cst_type (type, -1);
10104 return omit_one_operand_loc (loc, type, t1, arg0);
10108 /* X + (X / CST) * -CST is X % CST. */
10109 if (TREE_CODE (arg1) == MULT_EXPR
10110 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10111 && operand_equal_p (arg0,
10112 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10114 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10115 tree cst1 = TREE_OPERAND (arg1, 1);
10116 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10117 cst1, cst0);
10118 if (sum && integer_zerop (sum))
10119 return fold_convert_loc (loc, type,
10120 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10121 TREE_TYPE (arg0), arg0,
10122 cst0));
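/* Editor's sketch (hypothetical, not part of GCC): the X + (X / CST) * -CST
   fold above recognizes an open-coded remainder:  */
#if 0
static int rem (int x) { return x + x / 16 * -16; }  /* -> x % 16 */
#endif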
10126 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10127 one. Make sure the type is not saturating and has the signedness of
10128 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10129 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10130 if ((TREE_CODE (arg0) == MULT_EXPR
10131 || TREE_CODE (arg1) == MULT_EXPR)
10132 && !TYPE_SATURATING (type)
10133 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10134 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10135 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10137 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10138 if (tem)
10139 return tem;
10142 if (! FLOAT_TYPE_P (type))
10144 if (integer_zerop (arg1))
10145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10147 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10148 with a constant, and the two constants have no bits in common,
10149 we should treat this as a BIT_IOR_EXPR since this may produce more
10150 simplifications. */
10151 if (TREE_CODE (arg0) == BIT_AND_EXPR
10152 && TREE_CODE (arg1) == BIT_AND_EXPR
10153 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10154 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10155 && integer_zerop (const_binop (BIT_AND_EXPR,
10156 TREE_OPERAND (arg0, 1),
10157 TREE_OPERAND (arg1, 1))))
10159 code = BIT_IOR_EXPR;
10160 goto bit_ior;
10163 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10164 (plus (plus (mult) (mult)) (foo)) so that we can
10165 take advantage of the factoring cases below. */
10166 if (TYPE_OVERFLOW_WRAPS (type)
10167 && (((TREE_CODE (arg0) == PLUS_EXPR
10168 || TREE_CODE (arg0) == MINUS_EXPR)
10169 && TREE_CODE (arg1) == MULT_EXPR)
10170 || ((TREE_CODE (arg1) == PLUS_EXPR
10171 || TREE_CODE (arg1) == MINUS_EXPR)
10172 && TREE_CODE (arg0) == MULT_EXPR)))
10174 tree parg0, parg1, parg, marg;
10175 enum tree_code pcode;
10177 if (TREE_CODE (arg1) == MULT_EXPR)
10178 parg = arg0, marg = arg1;
10179 else
10180 parg = arg1, marg = arg0;
10181 pcode = TREE_CODE (parg);
10182 parg0 = TREE_OPERAND (parg, 0);
10183 parg1 = TREE_OPERAND (parg, 1);
10184 STRIP_NOPS (parg0);
10185 STRIP_NOPS (parg1);
10187 if (TREE_CODE (parg0) == MULT_EXPR
10188 && TREE_CODE (parg1) != MULT_EXPR)
10189 return fold_build2_loc (loc, pcode, type,
10190 fold_build2_loc (loc, PLUS_EXPR, type,
10191 fold_convert_loc (loc, type,
10192 parg0),
10193 fold_convert_loc (loc, type,
10194 marg)),
10195 fold_convert_loc (loc, type, parg1));
10196 if (TREE_CODE (parg0) != MULT_EXPR
10197 && TREE_CODE (parg1) == MULT_EXPR)
10198 return
10199 fold_build2_loc (loc, PLUS_EXPR, type,
10200 fold_convert_loc (loc, type, parg0),
10201 fold_build2_loc (loc, pcode, type,
10202 fold_convert_loc (loc, type, marg),
10203 fold_convert_loc (loc, type,
10204 parg1)));
10207 else
10209 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10210 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10211 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10213 /* Likewise if the operands are reversed. */
10214 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10215 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10217 /* Convert X + -C into X - C. */
10218 if (TREE_CODE (arg1) == REAL_CST
10219 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10221 tem = fold_negate_const (arg1, type);
10222 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10223 return fold_build2_loc (loc, MINUS_EXPR, type,
10224 fold_convert_loc (loc, type, arg0),
10225 fold_convert_loc (loc, type, tem));
10228 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10229 to __complex__ ( x, y ). This is not the same for SNaNs or
10230 if signed zeros are involved. */
10231 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10232 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10233 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10235 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10236 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10237 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10238 bool arg0rz = false, arg0iz = false;
10239 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10240 || (arg0i && (arg0iz = real_zerop (arg0i))))
10242 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10243 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10244 if (arg0rz && arg1i && real_zerop (arg1i))
10246 tree rp = arg1r ? arg1r
10247 : build1 (REALPART_EXPR, rtype, arg1);
10248 tree ip = arg0i ? arg0i
10249 : build1 (IMAGPART_EXPR, rtype, arg0);
10250 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10252 else if (arg0iz && arg1r && real_zerop (arg1r))
10254 tree rp = arg0r ? arg0r
10255 : build1 (REALPART_EXPR, rtype, arg0);
10256 tree ip = arg1i ? arg1i
10257 : build1 (IMAGPART_EXPR, rtype, arg1);
10258 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10263 if (flag_unsafe_math_optimizations
10264 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10265 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10266 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10267 return tem;
10269 /* Convert x+x into x*2.0. */
10270 if (operand_equal_p (arg0, arg1, 0)
10271 && SCALAR_FLOAT_TYPE_P (type))
10272 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10273 build_real (type, dconst2));
10275 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10276 We associate floats only if the user has specified
10277 -fassociative-math. */
10278 if (flag_associative_math
10279 && TREE_CODE (arg1) == PLUS_EXPR
10280 && TREE_CODE (arg0) != MULT_EXPR)
10282 tree tree10 = TREE_OPERAND (arg1, 0);
10283 tree tree11 = TREE_OPERAND (arg1, 1);
10284 if (TREE_CODE (tree11) == MULT_EXPR
10285 && TREE_CODE (tree10) == MULT_EXPR)
10287 tree tree0;
10288 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10289 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10292 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10293 We associate floats only if the user has specified
10294 -fassociative-math. */
10295 if (flag_associative_math
10296 && TREE_CODE (arg0) == PLUS_EXPR
10297 && TREE_CODE (arg1) != MULT_EXPR)
10299 tree tree00 = TREE_OPERAND (arg0, 0);
10300 tree tree01 = TREE_OPERAND (arg0, 1);
10301 if (TREE_CODE (tree01) == MULT_EXPR
10302 && TREE_CODE (tree00) == MULT_EXPR)
10304 tree tree0;
10305 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10306 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10311 bit_rotate:
10312 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10313 is a rotate of A by C1 bits. */
10314 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10315 is a rotate of A by B bits. */
10317 enum tree_code code0, code1;
10318 tree rtype;
10319 code0 = TREE_CODE (arg0);
10320 code1 = TREE_CODE (arg1);
10321 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10322 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10323 && operand_equal_p (TREE_OPERAND (arg0, 0),
10324 TREE_OPERAND (arg1, 0), 0)
10325 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10326 TYPE_UNSIGNED (rtype))
10327 /* Only create rotates in complete modes. Other cases are not
10328 expanded properly. */
10329 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10331 tree tree01, tree11;
10332 enum tree_code code01, code11;
10334 tree01 = TREE_OPERAND (arg0, 1);
10335 tree11 = TREE_OPERAND (arg1, 1);
10336 STRIP_NOPS (tree01);
10337 STRIP_NOPS (tree11);
10338 code01 = TREE_CODE (tree01);
10339 code11 = TREE_CODE (tree11);
10340 if (code01 == INTEGER_CST
10341 && code11 == INTEGER_CST
10342 && TREE_INT_CST_HIGH (tree01) == 0
10343 && TREE_INT_CST_HIGH (tree11) == 0
10344 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10345 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10347 tem = build2_loc (loc, LROTATE_EXPR,
10348 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10349 TREE_OPERAND (arg0, 0),
10350 code0 == LSHIFT_EXPR ? tree01 : tree11);
10351 return fold_convert_loc (loc, type, tem);
10353 else if (code11 == MINUS_EXPR)
10355 tree tree110, tree111;
10356 tree110 = TREE_OPERAND (tree11, 0);
10357 tree111 = TREE_OPERAND (tree11, 1);
10358 STRIP_NOPS (tree110);
10359 STRIP_NOPS (tree111);
10360 if (TREE_CODE (tree110) == INTEGER_CST
10361 && 0 == compare_tree_int (tree110,
10362 TYPE_PRECISION
10363 (TREE_TYPE (TREE_OPERAND
10364 (arg0, 0))))
10365 && operand_equal_p (tree01, tree111, 0))
10366 return
10367 fold_convert_loc (loc, type,
10368 build2 ((code0 == LSHIFT_EXPR
10369 ? LROTATE_EXPR
10370 : RROTATE_EXPR),
10371 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10372 TREE_OPERAND (arg0, 0), tree01));
10374 else if (code01 == MINUS_EXPR)
10376 tree tree010, tree011;
10377 tree010 = TREE_OPERAND (tree01, 0);
10378 tree011 = TREE_OPERAND (tree01, 1);
10379 STRIP_NOPS (tree010);
10380 STRIP_NOPS (tree011);
10381 if (TREE_CODE (tree010) == INTEGER_CST
10382 && 0 == compare_tree_int (tree010,
10383 TYPE_PRECISION
10384 (TREE_TYPE (TREE_OPERAND
10385 (arg0, 0))))
10386 && operand_equal_p (tree11, tree011, 0))
10387 return fold_convert_loc
10388 (loc, type,
10389 build2 ((code0 != LSHIFT_EXPR
10390 ? LROTATE_EXPR
10391 : RROTATE_EXPR),
10392 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10393 TREE_OPERAND (arg0, 0), tree11));
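/* Editor's sketch (hypothetical, not part of GCC): assuming 32-bit
   unsigned x and 0 < n < 32, the pattern above is recognized as a
   rotate because the shift counts sum to the precision:  */
#if 0
static unsigned rot (unsigned x, unsigned n)
{
  return (x << n) + (x >> (32 - n));   /* -> x rotated left by n */
}
#endif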
10398 associate:
10399 /* In most languages, we cannot associate operations on floats through
10400 parentheses. Rather than remember where the parentheses were, we
10401 don't associate floats at all, unless the user has specified
10402 -fassociative-math.
10403 And, we need to make sure type is not saturating. */
10405 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10406 && !TYPE_SATURATING (type))
10408 tree var0, con0, lit0, minus_lit0;
10409 tree var1, con1, lit1, minus_lit1;
10410 tree atype = type;
10411 bool ok = true;
10413 /* Split both trees into variables, constants, and literals. Then
10414 associate each group together, the constants with literals,
10415 then the result with variables. This increases the chances of
10416 literals being recombined later and of generating relocatable
10417 expressions for the sum of a constant and literal. */
10418 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10419 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10420 code == MINUS_EXPR);
10422 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10423 if (code == MINUS_EXPR)
10424 code = PLUS_EXPR;
10426 /* With undefined overflow prefer doing association in a type
10427 which wraps on overflow, if that is one of the operand types. */
10428 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10429 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10431 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10432 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10433 atype = TREE_TYPE (arg0);
10434 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10435 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10436 atype = TREE_TYPE (arg1);
10437 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10440 /* With undefined overflow we can only associate constants with one
10441 variable, and constants whose association doesn't overflow. */
10442 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10443 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10445 if (var0 && var1)
10447 tree tmp0 = var0;
10448 tree tmp1 = var1;
10450 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10451 tmp0 = TREE_OPERAND (tmp0, 0);
10452 if (CONVERT_EXPR_P (tmp0)
10453 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10454 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10455 <= TYPE_PRECISION (atype)))
10456 tmp0 = TREE_OPERAND (tmp0, 0);
10457 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10458 tmp1 = TREE_OPERAND (tmp1, 0);
10459 if (CONVERT_EXPR_P (tmp1)
10460 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10461 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10462 <= TYPE_PRECISION (atype)))
10463 tmp1 = TREE_OPERAND (tmp1, 0);
10464 /* The only case we can still associate with two variables
10465 is if they are the same, modulo negation and bit-pattern
10466 preserving conversions. */
10467 if (!operand_equal_p (tmp0, tmp1, 0))
10468 ok = false;
10472 /* Only do something if we found more than two objects. Otherwise,
10473 nothing has changed and we risk infinite recursion. */
10474 if (ok
10475 && (2 < ((var0 != 0) + (var1 != 0)
10476 + (con0 != 0) + (con1 != 0)
10477 + (lit0 != 0) + (lit1 != 0)
10478 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10480 bool any_overflows = false;
10481 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10482 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10483 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10484 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10485 var0 = associate_trees (loc, var0, var1, code, atype);
10486 con0 = associate_trees (loc, con0, con1, code, atype);
10487 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10488 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10489 code, atype);
10491 /* Preserve the MINUS_EXPR if the negative part of the literal is
10492 greater than the positive part. Otherwise, the multiplicative
10493 folding code (i.e. extract_muldiv) may be fooled when
10494 unsigned constants are subtracted, as in the following
10495 example: ((X*2 + 4) - 8U)/2. */
10496 if (minus_lit0 && lit0)
10498 if (TREE_CODE (lit0) == INTEGER_CST
10499 && TREE_CODE (minus_lit0) == INTEGER_CST
10500 && tree_int_cst_lt (lit0, minus_lit0))
10502 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10503 MINUS_EXPR, atype);
10504 lit0 = 0;
10506 else
10508 lit0 = associate_trees (loc, lit0, minus_lit0,
10509 MINUS_EXPR, atype);
10510 minus_lit0 = 0;
10514 /* Don't introduce overflows through reassociation. */
10515 if (!any_overflows
10516 && ((lit0 && TREE_OVERFLOW (lit0))
10517 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10518 return NULL_TREE;
10520 if (minus_lit0)
10522 if (con0 == 0)
10523 return
10524 fold_convert_loc (loc, type,
10525 associate_trees (loc, var0, minus_lit0,
10526 MINUS_EXPR, atype));
10527 else
10529 con0 = associate_trees (loc, con0, minus_lit0,
10530 MINUS_EXPR, atype);
10531 return
10532 fold_convert_loc (loc, type,
10533 associate_trees (loc, var0, con0,
10534 PLUS_EXPR, atype));
10538 con0 = associate_trees (loc, con0, lit0, code, atype);
10539 return
10540 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10541 code, atype));
10545 return NULL_TREE;
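/* Editor's sketch (hypothetical, not part of GCC): the reassociation
   above groups literals together so that constants combine:  */
#if 0
static int grp (int x) { return (x + 1) + 2; }  /* -> x + 3 */
static int mix (int x) { return (x - 2) + 5; }  /* -> x + 3 */
#endif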
10547 case MINUS_EXPR:
10548 /* Pointer simplifications for subtraction, simple reassociations. */
10549 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10551 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10552 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10553 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10555 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10556 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10557 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10558 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10559 return fold_build2_loc (loc, PLUS_EXPR, type,
10560 fold_build2_loc (loc, MINUS_EXPR, type,
10561 arg00, arg10),
10562 fold_build2_loc (loc, MINUS_EXPR, type,
10563 arg01, arg11));
10565 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10566 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10568 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10569 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10570 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10571 fold_convert_loc (loc, type, arg1));
10572 if (tmp)
10573 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10576 /* A - (-B) -> A + B */
10577 if (TREE_CODE (arg1) == NEGATE_EXPR)
10578 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10579 fold_convert_loc (loc, type,
10580 TREE_OPERAND (arg1, 0)));
10581 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10582 if (TREE_CODE (arg0) == NEGATE_EXPR
10583 && (FLOAT_TYPE_P (type)
10584 || INTEGRAL_TYPE_P (type))
10585 && negate_expr_p (arg1)
10586 && reorder_operands_p (arg0, arg1))
10587 return fold_build2_loc (loc, MINUS_EXPR, type,
10588 fold_convert_loc (loc, type,
10589 negate_expr (arg1)),
10590 fold_convert_loc (loc, type,
10591 TREE_OPERAND (arg0, 0)));
10592 /* Convert -A - 1 to ~A. */
10593 if (INTEGRAL_TYPE_P (type)
10594 && TREE_CODE (arg0) == NEGATE_EXPR
10595 && integer_onep (arg1)
10596 && !TYPE_OVERFLOW_TRAPS (type))
10597 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10598 fold_convert_loc (loc, type,
10599 TREE_OPERAND (arg0, 0)));
10601 /* Convert -1 - A to ~A. */
10602 if (INTEGRAL_TYPE_P (type)
10603 && integer_all_onesp (arg0))
10604 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10607 /* X - (X / CST) * CST is X % CST. */
10608 if (INTEGRAL_TYPE_P (type)
10609 && TREE_CODE (arg1) == MULT_EXPR
10610 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10611 && operand_equal_p (arg0,
10612 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10613 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10614 TREE_OPERAND (arg1, 1), 0))
10615 return
10616 fold_convert_loc (loc, type,
10617 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10618 arg0, TREE_OPERAND (arg1, 1)));
10620 if (! FLOAT_TYPE_P (type))
10622 if (integer_zerop (arg0))
10623 return negate_expr (fold_convert_loc (loc, type, arg1));
10624 if (integer_zerop (arg1))
10625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10627 /* Fold A - (A & B) into ~B & A. */
10628 if (!TREE_SIDE_EFFECTS (arg0)
10629 && TREE_CODE (arg1) == BIT_AND_EXPR)
10631 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10633 tree arg10 = fold_convert_loc (loc, type,
10634 TREE_OPERAND (arg1, 0));
10635 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10636 fold_build1_loc (loc, BIT_NOT_EXPR,
10637 type, arg10),
10638 fold_convert_loc (loc, type, arg0));
10640 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10642 tree arg11 = fold_convert_loc (loc,
10643 type, TREE_OPERAND (arg1, 1));
10644 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10645 fold_build1_loc (loc, BIT_NOT_EXPR,
10646 type, arg11),
10647 fold_convert_loc (loc, type, arg0));
10651 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10652 any power of 2 minus 1. */
10653 if (TREE_CODE (arg0) == BIT_AND_EXPR
10654 && TREE_CODE (arg1) == BIT_AND_EXPR
10655 && operand_equal_p (TREE_OPERAND (arg0, 0),
10656 TREE_OPERAND (arg1, 0), 0))
10658 tree mask0 = TREE_OPERAND (arg0, 1);
10659 tree mask1 = TREE_OPERAND (arg1, 1);
10660 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10662 if (operand_equal_p (tem, mask1, 0))
10664 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10665 TREE_OPERAND (arg0, 0), mask1);
10666 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
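/* Commentary-only worked example, with B == 7 (a power of 2 minus 1):
   (a & ~7) - (a & 7) folds to (a ^ 7) - 7; e.g. for a == 9 both
   forms yield 8 - 1 == 14 - 7 == 7.  The A - (A & B) fold just
   above similarly turns a - (a & b) into ~b & a. */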
10671 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10672 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10673 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10675 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10676 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10677 (-ARG1 + ARG0) reduces to -ARG1. */
10678 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10679 return negate_expr (fold_convert_loc (loc, type, arg1));
10681 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10682 __complex__ ( x, -y ). This is not the same for SNaNs or if
10683 signed zeros are involved. */
10684 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10685 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10686 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10688 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10689 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10690 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10691 bool arg0rz = false, arg0iz = false;
10692 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10693 || (arg0i && (arg0iz = real_zerop (arg0i))))
10695 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10696 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10697 if (arg0rz && arg1i && real_zerop (arg1i))
10699 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10700 arg1r ? arg1r
10701 : build1 (REALPART_EXPR, rtype, arg1));
10702 tree ip = arg0i ? arg0i
10703 : build1 (IMAGPART_EXPR, rtype, arg0);
10704 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10706 else if (arg0iz && arg1r && real_zerop (arg1r))
10708 tree rp = arg0r ? arg0r
10709 : build1 (REALPART_EXPR, rtype, arg0);
10710 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10711 arg1i ? arg1i
10712 : build1 (IMAGPART_EXPR, rtype, arg1));
10713 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10718 /* Fold &x - &x. This can happen from &x.foo - &x.
10719 This is unsafe for certain floats even in non-IEEE formats.
10721 In IEEE, it is unsafe because it gives the wrong result for NaNs.
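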
10721 Also note that operand_equal_p is always false if an operand
10722 is volatile. */
10724 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10725 && operand_equal_p (arg0, arg1, 0))
10726 return build_zero_cst (type);
10728 /* A - B -> A + (-B) if B is easily negatable. */
10729 if (negate_expr_p (arg1)
10730 && ((FLOAT_TYPE_P (type)
10731 /* Avoid this transformation if B is a positive REAL_CST. */
10732 && (TREE_CODE (arg1) != REAL_CST
10733 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10734 || INTEGRAL_TYPE_P (type)))
10735 return fold_build2_loc (loc, PLUS_EXPR, type,
10736 fold_convert_loc (loc, type, arg0),
10737 fold_convert_loc (loc, type,
10738 negate_expr (arg1)));
10740 /* Try folding difference of addresses. */
10742 HOST_WIDE_INT diff;
10744 if ((TREE_CODE (arg0) == ADDR_EXPR
10745 || TREE_CODE (arg1) == ADDR_EXPR)
10746 && ptr_difference_const (arg0, arg1, &diff))
10747 return build_int_cst_type (type, diff);
10750 /* Fold &a[i] - &a[j] to i-j. */
10751 if (TREE_CODE (arg0) == ADDR_EXPR
10752 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10753 && TREE_CODE (arg1) == ADDR_EXPR
10754 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10756 tree tem = fold_addr_of_array_ref_difference (loc, type,
10757 TREE_OPERAND (arg0, 0),
10758 TREE_OPERAND (arg1, 0));
10759 if (tem)
10760 return tem;
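/* Commentary-only sketch of the address folds above; V is a
   hypothetical file-scope array:

     int v[16];
     ptrdiff_t f (int i, int j)
     {
       return &v[i] - &v[j];
     }

   reduces to i - j, while constant differences such as
   &v[4] - &v[1] are handled just above by ptr_difference_const. */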
10763 if (FLOAT_TYPE_P (type)
10764 && flag_unsafe_math_optimizations
10765 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10766 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10767 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10768 return tem;
10770 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10771 one. Make sure the type is not saturating and has the signedness of
10772 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10773 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10774 if ((TREE_CODE (arg0) == MULT_EXPR
10775 || TREE_CODE (arg1) == MULT_EXPR)
10776 && !TYPE_SATURATING (type)
10777 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10778 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10779 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10781 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10782 if (tem)
10783 return tem;
10786 goto associate;
10788 case MULT_EXPR:
10789 /* (-A) * (-B) -> A * B */
10790 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10791 return fold_build2_loc (loc, MULT_EXPR, type,
10792 fold_convert_loc (loc, type,
10793 TREE_OPERAND (arg0, 0)),
10794 fold_convert_loc (loc, type,
10795 negate_expr (arg1)));
10796 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10797 return fold_build2_loc (loc, MULT_EXPR, type,
10798 fold_convert_loc (loc, type,
10799 negate_expr (arg0)),
10800 fold_convert_loc (loc, type,
10801 TREE_OPERAND (arg1, 0)));
10803 if (! FLOAT_TYPE_P (type))
10805 if (integer_zerop (arg1))
10806 return omit_one_operand_loc (loc, type, arg1, arg0);
10807 if (integer_onep (arg1))
10808 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10809 /* Transform x * -1 into -x. Make sure to do the negation
10810 on the original operand with conversions not stripped
10811 because we can only strip non-sign-changing conversions. */
10812 if (integer_all_onesp (arg1))
10813 return fold_convert_loc (loc, type, negate_expr (op0));
10814 /* Transform x * -C into -x * C if x is easily negatable. */
10815 if (TREE_CODE (arg1) == INTEGER_CST
10816 && tree_int_cst_sgn (arg1) == -1
10817 && negate_expr_p (arg0)
10818 && (tem = negate_expr (arg1)) != arg1
10819 && !TREE_OVERFLOW (tem))
10820 return fold_build2_loc (loc, MULT_EXPR, type,
10821 fold_convert_loc (loc, type,
10822 negate_expr (arg0)),
10823 tem);
10825 /* (a * (1 << b)) is (a << b) */
10826 if (TREE_CODE (arg1) == LSHIFT_EXPR
10827 && integer_onep (TREE_OPERAND (arg1, 0)))
10828 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10829 TREE_OPERAND (arg1, 1));
10830 if (TREE_CODE (arg0) == LSHIFT_EXPR
10831 && integer_onep (TREE_OPERAND (arg0, 0)))
10832 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10833 TREE_OPERAND (arg0, 1));
10835 /* (A + A) * C -> A * 2 * C */
10836 if (TREE_CODE (arg0) == PLUS_EXPR
10837 && TREE_CODE (arg1) == INTEGER_CST
10838 && operand_equal_p (TREE_OPERAND (arg0, 0),
10839 TREE_OPERAND (arg0, 1), 0))
10840 return fold_build2_loc (loc, MULT_EXPR, type,
10841 omit_one_operand_loc (loc, type,
10842 TREE_OPERAND (arg0, 0),
10843 TREE_OPERAND (arg0, 1)),
10844 fold_build2_loc (loc, MULT_EXPR, type,
10845 build_int_cst (type, 2) , arg1));
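/* Commentary-only sketch of the integer MULT folds above
   (hypothetical examples):

     int f (int a, int b) { return a * (1 << b); }
     int g (int a)        { return a * -1; }
     int h (int a)        { return (a + a) * 3; }

   fold to a << b, -a and a * 6 respectively, the last via
   (A + A) * C -> A * 2 * C with 2 * 3 folded to a constant. */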
10847 strict_overflow_p = false;
10848 if (TREE_CODE (arg1) == INTEGER_CST
10849 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10850 &strict_overflow_p)))
10852 if (strict_overflow_p)
10853 fold_overflow_warning (("assuming signed overflow does not "
10854 "occur when simplifying "
10855 "multiplication"),
10856 WARN_STRICT_OVERFLOW_MISC);
10857 return fold_convert_loc (loc, type, tem);
10860 /* Optimize z * conj(z) for integer complex numbers. */
10861 if (TREE_CODE (arg0) == CONJ_EXPR
10862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10863 return fold_mult_zconjz (loc, type, arg1);
10864 if (TREE_CODE (arg1) == CONJ_EXPR
10865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 return fold_mult_zconjz (loc, type, arg0);
10868 else
10870 /* Maybe fold x * 0 to 0. The expressions aren't the same
10871 when x is NaN, since x * 0 is also NaN. Nor are they the
10872 same in modes with signed zeros, since multiplying a
10873 negative value by 0 gives -0, not +0. */
10874 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10875 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10876 && real_zerop (arg1))
10877 return omit_one_operand_loc (loc, type, arg1, arg0);
10878 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10879 Likewise for complex arithmetic with signed zeros. */
10880 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10881 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10882 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10883 && real_onep (arg1))
10884 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10886 /* Transform x * -1.0 into -x. */
10887 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10888 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10889 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10890 && real_minus_onep (arg1))
10891 return fold_convert_loc (loc, type, negate_expr (arg0));
10893 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10894 the result for floating point types due to rounding, so it is applied
10895 only if -fassociative-math was specified. */
10896 if (flag_associative_math
10897 && TREE_CODE (arg0) == RDIV_EXPR
10898 && TREE_CODE (arg1) == REAL_CST
10899 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10901 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10902 arg1);
10903 if (tem)
10904 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10905 TREE_OPERAND (arg0, 1));
10908 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10909 if (operand_equal_p (arg0, arg1, 0))
10911 tree tem = fold_strip_sign_ops (arg0);
10912 if (tem != NULL_TREE)
10914 tem = fold_convert_loc (loc, type, tem);
10915 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10919 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10920 This is not the same for NaNs or if signed zeros are
10921 involved. */
10922 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10924 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10925 && TREE_CODE (arg1) == COMPLEX_CST
10926 && real_zerop (TREE_REALPART (arg1)))
10928 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10929 if (real_onep (TREE_IMAGPART (arg1)))
10930 return
10931 fold_build2_loc (loc, COMPLEX_EXPR, type,
10932 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10933 rtype, arg0)),
10934 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10935 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10936 return
10937 fold_build2_loc (loc, COMPLEX_EXPR, type,
10938 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10939 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10940 rtype, arg0)));
10943 /* Optimize z * conj(z) for floating point complex numbers.
10944 Guarded by flag_unsafe_math_optimizations as non-finite
10945 imaginary components don't produce scalar results. */
10946 if (flag_unsafe_math_optimizations
10947 && TREE_CODE (arg0) == CONJ_EXPR
10948 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10949 return fold_mult_zconjz (loc, type, arg1);
10950 if (flag_unsafe_math_optimizations
10951 && TREE_CODE (arg1) == CONJ_EXPR
10952 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10953 return fold_mult_zconjz (loc, type, arg0);
10955 if (flag_unsafe_math_optimizations)
10957 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10958 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10960 /* Optimizations of root(...)*root(...). */
10961 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10963 tree rootfn, arg;
10964 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10965 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10967 /* Optimize sqrt(x)*sqrt(x) as x. */
10968 if (BUILTIN_SQRT_P (fcode0)
10969 && operand_equal_p (arg00, arg10, 0)
10970 && ! HONOR_SNANS (TYPE_MODE (type)))
10971 return arg00;
10973 /* Optimize root(x)*root(y) as root(x*y). */
10974 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10975 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10976 return build_call_expr_loc (loc, rootfn, 1, arg);
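/* Commentary-only sketch, assuming -funsafe-math-optimizations:

     #include <math.h>
     double f (double x)           { return sqrt (x) * sqrt (x); }
     double g (double x, double y) { return sqrt (x) * sqrt (y); }

   f folds to x (when SNaNs need not be honored) and g folds to
   sqrt (x * y); the cbrt variants are handled the same way. */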
10979 /* Optimize expN(x)*expN(y) as expN(x+y). */
10980 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10982 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10983 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10984 CALL_EXPR_ARG (arg0, 0),
10985 CALL_EXPR_ARG (arg1, 0));
10986 return build_call_expr_loc (loc, expfn, 1, arg);
10989 /* Optimizations of pow(...)*pow(...). */
10990 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10991 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10992 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10994 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10995 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10996 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10997 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10999 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11000 if (operand_equal_p (arg01, arg11, 0))
11002 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11003 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11004 arg00, arg10);
11005 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11008 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11009 if (operand_equal_p (arg00, arg10, 0))
11011 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11012 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11013 arg01, arg11);
11014 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
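/* Commentary-only sketch of the pow folds above, again assuming
   -funsafe-math-optimizations:

     #include <math.h>
     double f (double x, double z, double y) { return pow (x, y) * pow (z, y); }
     double g (double x, double y, double z) { return pow (x, y) * pow (x, z); }

   f folds to pow (x * z, y) and g to pow (x, y + z). */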
11018 /* Optimize tan(x)*cos(x) as sin(x). */
11019 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11020 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11021 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11022 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11023 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11024 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11025 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11026 CALL_EXPR_ARG (arg1, 0), 0))
11028 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11030 if (sinfn != NULL_TREE)
11031 return build_call_expr_loc (loc, sinfn, 1,
11032 CALL_EXPR_ARG (arg0, 0));
11035 /* Optimize x*pow(x,c) as pow(x,c+1). */
11036 if (fcode1 == BUILT_IN_POW
11037 || fcode1 == BUILT_IN_POWF
11038 || fcode1 == BUILT_IN_POWL)
11040 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11041 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11042 if (TREE_CODE (arg11) == REAL_CST
11043 && !TREE_OVERFLOW (arg11)
11044 && operand_equal_p (arg0, arg10, 0))
11046 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11047 REAL_VALUE_TYPE c;
11048 tree arg;
11050 c = TREE_REAL_CST (arg11);
11051 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11052 arg = build_real (type, c);
11053 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11057 /* Optimize pow(x,c)*x as pow(x,c+1). */
11058 if (fcode0 == BUILT_IN_POW
11059 || fcode0 == BUILT_IN_POWF
11060 || fcode0 == BUILT_IN_POWL)
11062 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11063 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11064 if (TREE_CODE (arg01) == REAL_CST
11065 && !TREE_OVERFLOW (arg01)
11066 && operand_equal_p (arg1, arg00, 0))
11068 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11069 REAL_VALUE_TYPE c;
11070 tree arg;
11072 c = TREE_REAL_CST (arg01);
11073 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11074 arg = build_real (type, c);
11075 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11079 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11080 if (!in_gimple_form
11081 && optimize
11082 && operand_equal_p (arg0, arg1, 0))
11084 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11086 if (powfn)
11088 tree arg = build_real (type, dconst2);
11089 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11094 goto associate;
11096 case BIT_IOR_EXPR:
11097 bit_ior:
11098 if (integer_all_onesp (arg1))
11099 return omit_one_operand_loc (loc, type, arg1, arg0);
11100 if (integer_zerop (arg1))
11101 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11102 if (operand_equal_p (arg0, arg1, 0))
11103 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11105 /* ~X | X is -1. */
11106 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11109 t1 = build_zero_cst (type);
11110 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11111 return omit_one_operand_loc (loc, type, t1, arg1);
11114 /* X | ~X is -1. */
11115 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11118 t1 = build_zero_cst (type);
11119 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11120 return omit_one_operand_loc (loc, type, t1, arg0);
11123 /* Canonicalize (X & C1) | C2. */
11124 if (TREE_CODE (arg0) == BIT_AND_EXPR
11125 && TREE_CODE (arg1) == INTEGER_CST
11126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11128 double_int c1, c2, c3, msk;
11129 int width = TYPE_PRECISION (type), w;
11130 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11131 c2 = tree_to_double_int (arg1);
11133 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11134 if ((c1 & c2) == c1)
11135 return omit_one_operand_loc (loc, type, arg1,
11136 TREE_OPERAND (arg0, 0));
11138 msk = double_int::mask (width);
11140 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11141 if (msk.and_not (c1 | c2).is_zero ())
11142 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11143 TREE_OPERAND (arg0, 0), arg1);
11145 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11146 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11147 mode which allows further optimizations. */
11148 c1 &= msk;
11149 c2 &= msk;
11150 c3 = c1.and_not (c2);
11151 for (w = BITS_PER_UNIT;
11152 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11153 w <<= 1)
11155 unsigned HOST_WIDE_INT mask
11156 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11157 if (((c1.low | c2.low) & mask) == mask
11158 && (c1.low & ~mask) == 0 && c1.high == 0)
11160 c3 = double_int::from_uhwi (mask);
11161 break;
11164 if (c3 != c1)
11165 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11166 fold_build2_loc (loc, BIT_AND_EXPR, type,
11167 TREE_OPERAND (arg0, 0),
11168 double_int_to_tree (type,
11169 c3)),
11170 arg1);
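/* Worked examples for the canonicalization above (commentary only,
   assuming 32-bit int):

     (x & 0x0f) | 0x1f  ->  0x1f               since C1 & C2 == C1
     (x & 0x0f) | 0x03  ->  (x & 0x0c) | 0x03  C1 shrunk to C1 & ~C2
     (x & 0xf3) | 0x0f  ->  (x & 0xff) | 0x0f  C1 widened to the
                                               QImode mask instead */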
11173 /* (X & Y) | Y is (X, Y). */
11174 if (TREE_CODE (arg0) == BIT_AND_EXPR
11175 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11176 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11177 /* (X & Y) | X is (Y, X). */
11178 if (TREE_CODE (arg0) == BIT_AND_EXPR
11179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11180 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11181 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11182 /* X | (X & Y) is (Y, X). */
11183 if (TREE_CODE (arg1) == BIT_AND_EXPR
11184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11186 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11187 /* X | (Y & X) is (Y, X). */
11188 if (TREE_CODE (arg1) == BIT_AND_EXPR
11189 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11190 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11191 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11193 /* (X & ~Y) | (~X & Y) is X ^ Y */
11194 if (TREE_CODE (arg0) == BIT_AND_EXPR
11195 && TREE_CODE (arg1) == BIT_AND_EXPR)
11197 tree a0, a1, l0, l1, n0, n1;
11199 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11200 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11202 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11203 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11205 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11206 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11208 if ((operand_equal_p (n0, a0, 0)
11209 && operand_equal_p (n1, a1, 0))
11210 || (operand_equal_p (n0, a1, 0)
11211 && operand_equal_p (n1, a0, 0)))
11212 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11215 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11216 if (t1 != NULL_TREE)
11217 return t1;
11219 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11221 This results in more efficient code for machines without a NAND
11222 instruction. Combine will canonicalize to the first form
11223 which will allow use of NAND instructions provided by the
11224 backend if they exist. */
11225 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11226 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11228 return
11229 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11230 build2 (BIT_AND_EXPR, type,
11231 fold_convert_loc (loc, type,
11232 TREE_OPERAND (arg0, 0)),
11233 fold_convert_loc (loc, type,
11234 TREE_OPERAND (arg1, 0))));
11237 /* See if this can be simplified into a rotate first. If that
11238 is unsuccessful continue in the association code. */
11239 goto bit_rotate;
11241 case BIT_XOR_EXPR:
11242 if (integer_zerop (arg1))
11243 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11244 if (integer_all_onesp (arg1))
11245 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11246 if (operand_equal_p (arg0, arg1, 0))
11247 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11249 /* ~X ^ X is -1. */
11250 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11253 t1 = build_zero_cst (type);
11254 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11255 return omit_one_operand_loc (loc, type, t1, arg1);
11258 /* X ^ ~X is -1. */
11259 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11262 t1 = build_zero_cst (type);
11263 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11264 return omit_one_operand_loc (loc, type, t1, arg0);
11267 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11268 with a constant, and the two constants have no bits in common,
11269 we should treat this as a BIT_IOR_EXPR since this may produce more
11270 simplifications. */
11271 if (TREE_CODE (arg0) == BIT_AND_EXPR
11272 && TREE_CODE (arg1) == BIT_AND_EXPR
11273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11274 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11275 && integer_zerop (const_binop (BIT_AND_EXPR,
11276 TREE_OPERAND (arg0, 1),
11277 TREE_OPERAND (arg1, 1))))
11279 code = BIT_IOR_EXPR;
11280 goto bit_ior;
11283 /* (X | Y) ^ X -> Y & ~X.  */
11284 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11285 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11287 tree t2 = TREE_OPERAND (arg0, 1);
11288 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11289 arg1);
11290 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11291 fold_convert_loc (loc, type, t2),
11292 fold_convert_loc (loc, type, t1));
11293 return t1;
11296 /* (Y | X) ^ X -> Y & ~X.  */
11297 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11298 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11300 tree t2 = TREE_OPERAND (arg0, 0);
11301 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11302 arg1);
11303 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11304 fold_convert_loc (loc, type, t2),
11305 fold_convert_loc (loc, type, t1));
11306 return t1;
11309 /* X ^ (X | Y) -> Y & ~X.  */
11310 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11311 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11313 tree t2 = TREE_OPERAND (arg1, 1);
11314 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11315 arg0);
11316 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11317 fold_convert_loc (loc, type, t2),
11318 fold_convert_loc (loc, type, t1));
11319 return t1;
11322 /* X ^ (Y | X) -> Y & ~X.  */
11323 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11324 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11326 tree t2 = TREE_OPERAND (arg1, 0);
11327 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11328 arg0);
11329 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11330 fold_convert_loc (loc, type, t2),
11331 fold_convert_loc (loc, type, t1));
11332 return t1;
11335 /* Convert ~X ^ ~Y to X ^ Y. */
11336 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11337 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11338 return fold_build2_loc (loc, code, type,
11339 fold_convert_loc (loc, type,
11340 TREE_OPERAND (arg0, 0)),
11341 fold_convert_loc (loc, type,
11342 TREE_OPERAND (arg1, 0)));
11344 /* Convert ~X ^ C to X ^ ~C. */
11345 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11346 && TREE_CODE (arg1) == INTEGER_CST)
11347 return fold_build2_loc (loc, code, type,
11348 fold_convert_loc (loc, type,
11349 TREE_OPERAND (arg0, 0)),
11350 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11352 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11353 if (TREE_CODE (arg0) == BIT_AND_EXPR
11354 && integer_onep (TREE_OPERAND (arg0, 1))
11355 && integer_onep (arg1))
11356 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11357 build_zero_cst (TREE_TYPE (arg0)));
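/* Commentary-only sketch: the fold above turns the hypothetical
   parity-style test

     int f (int x)
     {
       return (x & 1) ^ 1;
     }

   into (x & 1) == 0, exposing it to the comparison folders. */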
11359 /* Fold (X & Y) ^ Y as ~X & Y. */
11360 if (TREE_CODE (arg0) == BIT_AND_EXPR
11361 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11363 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11364 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11365 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11366 fold_convert_loc (loc, type, arg1));
11368 /* Fold (X & Y) ^ X as ~Y & X. */
11369 if (TREE_CODE (arg0) == BIT_AND_EXPR
11370 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11371 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11373 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11374 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11375 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11376 fold_convert_loc (loc, type, arg1));
11378 /* Fold X ^ (X & Y) as X & ~Y. */
11379 if (TREE_CODE (arg1) == BIT_AND_EXPR
11380 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11382 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11383 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11384 fold_convert_loc (loc, type, arg0),
11385 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11387 /* Fold X ^ (Y & X) as ~Y & X. */
11388 if (TREE_CODE (arg1) == BIT_AND_EXPR
11389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11390 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11392 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11393 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11394 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11395 fold_convert_loc (loc, type, arg0));
11398 /* See if this can be simplified into a rotate first. If that
11399 is unsuccessful continue in the association code. */
11400 goto bit_rotate;
11402 case BIT_AND_EXPR:
11403 if (integer_all_onesp (arg1))
11404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11405 if (integer_zerop (arg1))
11406 return omit_one_operand_loc (loc, type, arg1, arg0);
11407 if (operand_equal_p (arg0, arg1, 0))
11408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11410 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11411 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11412 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11413 || (TREE_CODE (arg0) == EQ_EXPR
11414 && integer_zerop (TREE_OPERAND (arg0, 1))))
11415 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11416 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11418 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11419 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11420 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11421 || (TREE_CODE (arg1) == EQ_EXPR
11422 && integer_zerop (TREE_OPERAND (arg1, 1))))
11423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11424 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11426 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11427 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11428 && TREE_CODE (arg1) == INTEGER_CST
11429 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11431 tree tmp1 = fold_convert_loc (loc, type, arg1);
11432 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11433 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11434 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11435 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11436 return
11437 fold_convert_loc (loc, type,
11438 fold_build2_loc (loc, BIT_IOR_EXPR,
11439 type, tmp2, tmp3));
11442 /* (X | Y) & Y is (X, Y). */
11443 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11444 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11445 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11446 /* (X | Y) & X is (Y, X). */
11447 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11449 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11450 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11451 /* X & (X | Y) is (Y, X). */
11452 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11453 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11454 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11455 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11456 /* X & (Y | X) is (Y, X). */
11457 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11458 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11459 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11460 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11462 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11463 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11464 && integer_onep (TREE_OPERAND (arg0, 1))
11465 && integer_onep (arg1))
11467 tree tem2;
11468 tem = TREE_OPERAND (arg0, 0);
11469 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11470 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11471 tem, tem2);
11472 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11473 build_zero_cst (TREE_TYPE (tem)));
11475 /* Fold ~X & 1 as (X & 1) == 0. */
11476 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11477 && integer_onep (arg1))
11479 tree tem2;
11480 tem = TREE_OPERAND (arg0, 0);
11481 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11482 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11483 tem, tem2);
11484 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11485 build_zero_cst (TREE_TYPE (tem)));
11487 /* Fold !X & 1 as X == 0. */
11488 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11489 && integer_onep (arg1))
11491 tem = TREE_OPERAND (arg0, 0);
11492 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11493 build_zero_cst (TREE_TYPE (tem)));
11496 /* Fold (X ^ Y) & Y as ~X & Y. */
11497 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11498 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11500 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11501 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11502 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11503 fold_convert_loc (loc, type, arg1));
11505 /* Fold (X ^ Y) & X as ~Y & X. */
11506 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11508 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11510 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11511 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11512 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11513 fold_convert_loc (loc, type, arg1));
11515 /* Fold X & (X ^ Y) as X & ~Y. */
11516 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11519 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11520 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11521 fold_convert_loc (loc, type, arg0),
11522 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11524 /* Fold X & (Y ^ X) as ~Y & X. */
11525 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11526 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11527 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11529 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11530 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11531 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11532 fold_convert_loc (loc, type, arg0));
11535 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11536 multiple of 1 << CST. */
11537 if (TREE_CODE (arg1) == INTEGER_CST)
11539 double_int cst1 = tree_to_double_int (arg1);
11540 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11541 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11542 if ((cst1 & ncst1) == ncst1
11543 && multiple_of_p (type, arg0,
11544 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11545 return fold_convert_loc (loc, type, arg0);
11548 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11549 bits from CST2. */
11550 if (TREE_CODE (arg1) == INTEGER_CST
11551 && TREE_CODE (arg0) == MULT_EXPR
11552 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11554 int arg1tz
11555 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11556 if (arg1tz > 0)
11558 double_int arg1mask, masked;
11559 arg1mask = ~double_int::mask (arg1tz);
11560 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11561 TYPE_UNSIGNED (type));
11562 masked = arg1mask & tree_to_double_int (arg1);
11563 if (masked.is_zero ())
11564 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11565 arg0, arg1);
11566 else if (masked != tree_to_double_int (arg1))
11567 return fold_build2_loc (loc, code, type, op0,
11568 double_int_to_tree (type, masked));
11572 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11573 ((A & N) + B) & M -> (A + B) & M
11574 Similarly if (N & M) == 0,
11575 ((A | N) + B) & M -> (A + B) & M
11576 and for - instead of + (or unary - instead of +)
11577 and/or ^ instead of |.
11578 If B is constant and (B & M) == 0, fold into A & M. */
11579 if (host_integerp (arg1, 1))
11581 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11582 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11583 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11584 && (TREE_CODE (arg0) == PLUS_EXPR
11585 || TREE_CODE (arg0) == MINUS_EXPR
11586 || TREE_CODE (arg0) == NEGATE_EXPR)
11587 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11588 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11590 tree pmop[2];
11591 int which = 0;
11592 unsigned HOST_WIDE_INT cst0;
11594 /* Now we know that arg0 is (C + D) or (C - D) or
11595 -C and arg1 (M) is == (1LL << cst) - 1.
11596 Store C into PMOP[0] and D into PMOP[1]. */
11597 pmop[0] = TREE_OPERAND (arg0, 0);
11598 pmop[1] = NULL;
11599 if (TREE_CODE (arg0) != NEGATE_EXPR)
11601 pmop[1] = TREE_OPERAND (arg0, 1);
11602 which = 1;
11605 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11606 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11607 & cst1) != cst1)
11608 which = -1;
11610 for (; which >= 0; which--)
11611 switch (TREE_CODE (pmop[which]))
11613 case BIT_AND_EXPR:
11614 case BIT_IOR_EXPR:
11615 case BIT_XOR_EXPR:
11616 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11617 != INTEGER_CST)
11618 break;
11619 /* tree_low_cst not used, because we don't care about
11620 the upper bits. */
11621 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11622 cst0 &= cst1;
11623 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11625 if (cst0 != cst1)
11626 break;
11628 else if (cst0 != 0)
11629 break;
11630 /* If C or D is of the form (A & N) where
11631 (N & M) == M, or of the form (A | N) or
11632 (A ^ N) where (N & M) == 0, replace it with A. */
11633 pmop[which] = TREE_OPERAND (pmop[which], 0);
11634 break;
11635 case INTEGER_CST:
11636 /* If C or D is a N where (N & M) == 0, it can be
11637 omitted (assumed 0). */
11638 if ((TREE_CODE (arg0) == PLUS_EXPR
11639 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11640 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11641 pmop[which] = NULL;
11642 break;
11643 default:
11644 break;
11647 /* Only build anything new if we optimized one or both arguments
11648 above. */
11649 if (pmop[0] != TREE_OPERAND (arg0, 0)
11650 || (TREE_CODE (arg0) != NEGATE_EXPR
11651 && pmop[1] != TREE_OPERAND (arg0, 1)))
11653 tree utype = TREE_TYPE (arg0);
11654 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11656 /* Perform the operations in a type that has defined
11657 overflow behavior. */
11658 utype = unsigned_type_for (TREE_TYPE (arg0));
11659 if (pmop[0] != NULL)
11660 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11661 if (pmop[1] != NULL)
11662 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11665 if (TREE_CODE (arg0) == NEGATE_EXPR)
11666 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11667 else if (TREE_CODE (arg0) == PLUS_EXPR)
11669 if (pmop[0] != NULL && pmop[1] != NULL)
11670 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11671 pmop[0], pmop[1]);
11672 else if (pmop[0] != NULL)
11673 tem = pmop[0];
11674 else if (pmop[1] != NULL)
11675 tem = pmop[1];
11676 else
11677 return build_int_cst (type, 0);
11679 else if (pmop[0] == NULL)
11680 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11681 else
11682 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11683 pmop[0], pmop[1]);
11684 /* TEM is now the new binary +, - or unary - replacement. */
11685 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11686 fold_convert_loc (loc, utype, arg1));
11687 return fold_convert_loc (loc, type, tem);
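/* Worked examples for the PLUS/MINUS-under-mask fold above
   (commentary only), with M == 0xff so that M + 1 is a power of 2:

     ((a & 0x1ff) + b) & 0xff  ->  (a + b) & 0xff   N & M == M
     ((a | 0x300) + b) & 0xff  ->  (a + b) & 0xff   N & M == 0
     (a + 0x100) & 0xff        ->  a & 0xff         B & M == 0

   The arithmetic is rebuilt in an unsigned type first when signed
   overflow does not wrap, keeping the rewrite well-defined. */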
11692 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11693 if (t1 != NULL_TREE)
11694 return t1;
11695 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11696 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11697 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11699 unsigned int prec
11700 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11702 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11703 && (~TREE_INT_CST_LOW (arg1)
11704 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11705 return
11706 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11709 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11711 This results in more efficient code for machines without a NOR
11712 instruction. Combine will canonicalize to the first form
11713 which will allow use of NOR instructions provided by the
11714 backend if they exist. */
11715 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11716 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11718 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11719 build2 (BIT_IOR_EXPR, type,
11720 fold_convert_loc (loc, type,
11721 TREE_OPERAND (arg0, 0)),
11722 fold_convert_loc (loc, type,
11723 TREE_OPERAND (arg1, 0))));
11726 /* If arg0 is derived from the address of an object or function, we may
11727 be able to fold this expression using the object or function's
11728 alignment. */
11729 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11731 unsigned HOST_WIDE_INT modulus, residue;
11732 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11734 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11735 integer_onep (arg1));
11737 /* This works because modulus is a power of 2. If this weren't the
11738 case, we'd have to replace it by its greatest power-of-2
11739 divisor: modulus & -modulus. */
11740 if (low < modulus)
11741 return build_int_cst (type, residue & low);
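/* Commentary-only sketch of the alignment fold above, with a
   hypothetical over-aligned object:

     static int v[4] __attribute__ ((aligned (16)));

   a test like ((uintptr_t) &v[0]) & 0xf can fold to 0 here once
   get_pointer_modulus_and_residue proves modulus 16, residue 0. */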
11744 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11745 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11746 if the new mask might be further optimized. */
11747 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11748 || TREE_CODE (arg0) == RSHIFT_EXPR)
11749 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11750 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11751 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11752 < TYPE_PRECISION (TREE_TYPE (arg0))
11753 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11754 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11756 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11757 unsigned HOST_WIDE_INT mask
11758 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11759 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11760 tree shift_type = TREE_TYPE (arg0);
11762 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11763 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11764 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11765 && TYPE_PRECISION (TREE_TYPE (arg0))
11766 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11768 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11769 tree arg00 = TREE_OPERAND (arg0, 0);
11770 /* See if more bits can be proven as zero because of
11771 zero extension. */
11772 if (TREE_CODE (arg00) == NOP_EXPR
11773 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11775 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11776 if (TYPE_PRECISION (inner_type)
11777 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11778 && TYPE_PRECISION (inner_type) < prec)
11780 prec = TYPE_PRECISION (inner_type);
11781 /* See if we can shorten the right shift. */
11782 if (shiftc < prec)
11783 shift_type = inner_type;
11786 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11787 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11788 zerobits <<= prec - shiftc;
11789 /* For an arithmetic shift, if the sign bit could be set, zerobits
11790 may actually contain sign bits, so no transformation is
11791 possible, unless MASK masks them all away. In that
11792 case the shift needs to be converted into a logical shift. */
11793 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11794 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11796 if ((mask & zerobits) == 0)
11797 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11798 else
11799 zerobits = 0;
11803 /* ((X << 16) & 0xff00) is (X, 0). */
11804 if ((mask & zerobits) == mask)
11805 return omit_one_operand_loc (loc, type,
11806 build_int_cst (type, 0), arg0);
11808 newmask = mask | zerobits;
11809 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11811 unsigned int prec;
11813 /* Only do the transformation if NEWMASK is some integer
11814 mode's mask. */
11815 for (prec = BITS_PER_UNIT;
11816 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11817 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11818 break;
11819 if (prec < HOST_BITS_PER_WIDE_INT
11820 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11822 tree newmaskt;
11824 if (shift_type != TREE_TYPE (arg0))
11826 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11827 fold_convert_loc (loc, shift_type,
11828 TREE_OPERAND (arg0, 0)),
11829 TREE_OPERAND (arg0, 1));
11830 tem = fold_convert_loc (loc, type, tem);
11832 else
11833 tem = op0;
11834 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11835 if (!tree_int_cst_equal (newmaskt, arg1))
11836 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11841 goto associate;
11843 case RDIV_EXPR:
11844 /* Don't touch a floating-point divide by zero unless the mode
11845 of the constant can represent infinity. */
11846 if (TREE_CODE (arg1) == REAL_CST
11847 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11848 && real_zerop (arg1))
11849 return NULL_TREE;
11851 /* Optimize A / A to 1.0 if we don't care about
11852 NaNs or Infinities. Skip the transformation
11853 for non-real operands. */
11854 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11855 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11856 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11857 && operand_equal_p (arg0, arg1, 0))
11859 tree r = build_real (TREE_TYPE (arg0), dconst1);
11861 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11864 /* The complex version of the above A / A optimization. */
11865 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11866 && operand_equal_p (arg0, arg1, 0))
11868 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11869 if (! HONOR_NANS (TYPE_MODE (elem_type))
11870 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11872 tree r = build_real (elem_type, dconst1);
11873 /* omit_two_operands will call fold_convert for us. */
11874 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11878 /* (-A) / (-B) -> A / B */
11879 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11880 return fold_build2_loc (loc, RDIV_EXPR, type,
11881 TREE_OPERAND (arg0, 0),
11882 negate_expr (arg1));
11883 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11884 return fold_build2_loc (loc, RDIV_EXPR, type,
11885 negate_expr (arg0),
11886 TREE_OPERAND (arg1, 0));
11888 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11889 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11890 && real_onep (arg1))
11891 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11893 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11894 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11895 && real_minus_onep (arg1))
11896 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11897 negate_expr (arg0)));
11899 /* If ARG1 is a constant, we can convert this to a multiply by the
11900 reciprocal. This does not have the same rounding properties,
11901 so only do this if -freciprocal-math. We can actually
11902 always safely do it if ARG1 is a power of two, but it's hard to
11903 tell if it is or not in a portable manner. */
11904 if (optimize
11905 && (TREE_CODE (arg1) == REAL_CST
11906 || (TREE_CODE (arg1) == COMPLEX_CST
11907 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11908 || (TREE_CODE (arg1) == VECTOR_CST
11909 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11911 if (flag_reciprocal_math
11912 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11913 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11914 /* Find the reciprocal if optimizing and the result is exact.
11915 TODO: Complex reciprocal not implemented. */
11916 if (TREE_CODE (arg1) != COMPLEX_CST)
11918 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11920 if (inverse)
11921 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
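/* Commentary-only sketch: a hypothetical

     double f (double x) { return x / 4.0; }

   folds to x * 0.25: under -freciprocal-math via const_binop on
   1.0 / 4.0, and even without the flag via exact_inverse, because
   the reciprocal of a power of two is exact. */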
11924 /* Convert A/B/C to A/(B*C). */
11925 if (flag_reciprocal_math
11926 && TREE_CODE (arg0) == RDIV_EXPR)
11927 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11928 fold_build2_loc (loc, MULT_EXPR, type,
11929 TREE_OPERAND (arg0, 1), arg1));
11931 /* Convert A/(B/C) to (A/B)*C. */
11932 if (flag_reciprocal_math
11933 && TREE_CODE (arg1) == RDIV_EXPR)
11934 return fold_build2_loc (loc, MULT_EXPR, type,
11935 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11936 TREE_OPERAND (arg1, 0)),
11937 TREE_OPERAND (arg1, 1));
11939 /* Convert C1/(X*C2) into (C1/C2)/X. */
11940 if (flag_reciprocal_math
11941 && TREE_CODE (arg1) == MULT_EXPR
11942 && TREE_CODE (arg0) == REAL_CST
11943 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11945 tree tem = const_binop (RDIV_EXPR, arg0,
11946 TREE_OPERAND (arg1, 1));
11947 if (tem)
11948 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11949 TREE_OPERAND (arg1, 0));
11952 if (flag_unsafe_math_optimizations)
11954 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11955 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11957 /* Optimize sin(x)/cos(x) as tan(x). */
11958 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11959 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11960 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11961 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11962 CALL_EXPR_ARG (arg1, 0), 0))
11964 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11966 if (tanfn != NULL_TREE)
11967 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11970 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11971 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11972 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11973 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11974 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11975 CALL_EXPR_ARG (arg1, 0), 0))
11977 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11979 if (tanfn != NULL_TREE)
11981 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11982 CALL_EXPR_ARG (arg0, 0));
11983 return fold_build2_loc (loc, RDIV_EXPR, type,
11984 build_real (type, dconst1), tmp);
11988 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11989 NaNs or Infinities. */
11990 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11991 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11992 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11994 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11995 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11997 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11998 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11999 && operand_equal_p (arg00, arg01, 0))
12001 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12003 if (cosfn != NULL_TREE)
12004 return build_call_expr_loc (loc, cosfn, 1, arg00);
12008 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12009 NaNs or Infinities. */
12010 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12011 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12012 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12014 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12015 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12017 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12018 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12019 && operand_equal_p (arg00, arg01, 0))
12021 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12023 if (cosfn != NULL_TREE)
12025 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12026 return fold_build2_loc (loc, RDIV_EXPR, type,
12027 build_real (type, dconst1),
12028 tmp);
12033 /* Optimize pow(x,c)/x as pow(x,c-1). */
12034 if (fcode0 == BUILT_IN_POW
12035 || fcode0 == BUILT_IN_POWF
12036 || fcode0 == BUILT_IN_POWL)
12038 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12039 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12040 if (TREE_CODE (arg01) == REAL_CST
12041 && !TREE_OVERFLOW (arg01)
12042 && operand_equal_p (arg1, arg00, 0))
12044 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12045 REAL_VALUE_TYPE c;
12046 tree arg;
12048 c = TREE_REAL_CST (arg01);
12049 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12050 arg = build_real (type, c);
12051 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12055 /* Optimize a/root(b/c) into a*root(c/b). */
12056 if (BUILTIN_ROOT_P (fcode1))
12058 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12060 if (TREE_CODE (rootarg) == RDIV_EXPR)
12062 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12063 tree b = TREE_OPERAND (rootarg, 0);
12064 tree c = TREE_OPERAND (rootarg, 1);
12066 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12068 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12069 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12073 /* Optimize x/expN(y) into x*expN(-y). */
12074 if (BUILTIN_EXPONENT_P (fcode1))
12076 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12077 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12078 arg1 = build_call_expr_loc (loc,
12079 expfn, 1,
12080 fold_convert_loc (loc, type, arg));
12081 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12084 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12085 if (fcode1 == BUILT_IN_POW
12086 || fcode1 == BUILT_IN_POWF
12087 || fcode1 == BUILT_IN_POWL)
12089 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12090 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12091 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12092 tree neg11 = fold_convert_loc (loc, type,
12093 negate_expr (arg11));
12094 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12095 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12098 return NULL_TREE;
12100 case TRUNC_DIV_EXPR:
12101 /* Optimize (X & (-A)) / A where A is a power of 2,
12102 to X >> log2(A) */
12103 if (TREE_CODE (arg0) == BIT_AND_EXPR
12104 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12105 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12107 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12108 arg1, TREE_OPERAND (arg0, 1));
12109 if (sum && integer_zerop (sum)) {
12110 unsigned long pow2;
12112 if (TREE_INT_CST_LOW (arg1))
12113 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12114 else
12115 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12116 + HOST_BITS_PER_WIDE_INT;
12118 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12119 TREE_OPERAND (arg0, 0),
12120 build_int_cst (integer_type_node, pow2));
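/* Commentary-only sketch, with A == 8:

     int f (int x) { return (x & -8) / 8; }

   folds to x >> 3; the mask already rounds toward minus infinity,
   exactly as the arithmetic right shift does, so the division is
   exact on the masked value. */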
12124 /* Fall through */
12126 case FLOOR_DIV_EXPR:
12127 /* Simplify A / (B << N) where A and B are positive and B is
12128 a power of 2, to A >> (N + log2(B)). */
12129 strict_overflow_p = false;
12130 if (TREE_CODE (arg1) == LSHIFT_EXPR
12131 && (TYPE_UNSIGNED (type)
12132 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12134 tree sval = TREE_OPERAND (arg1, 0);
12135 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12137 tree sh_cnt = TREE_OPERAND (arg1, 1);
12138 unsigned long pow2;
12140 if (TREE_INT_CST_LOW (sval))
12141 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12142 else
12143 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12144 + HOST_BITS_PER_WIDE_INT;
12146 if (strict_overflow_p)
12147 fold_overflow_warning (("assuming signed overflow does not "
12148 "occur when simplifying A / (B << N)"),
12149 WARN_STRICT_OVERFLOW_MISC);
12151 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12152 sh_cnt,
12153 build_int_cst (TREE_TYPE (sh_cnt),
12154 pow2));
12155 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12156 fold_convert_loc (loc, type, arg0), sh_cnt);
12160 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12161 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12162 if (INTEGRAL_TYPE_P (type)
12163 && TYPE_UNSIGNED (type)
12164 && code == FLOOR_DIV_EXPR)
12165 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12167 /* Fall through */
12169 case ROUND_DIV_EXPR:
12170 case CEIL_DIV_EXPR:
12171 case EXACT_DIV_EXPR:
12172 if (integer_onep (arg1))
12173 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12174 if (integer_zerop (arg1))
12175 return NULL_TREE;
12176 /* X / -1 is -X. */
12177 if (!TYPE_UNSIGNED (type)
12178 && TREE_CODE (arg1) == INTEGER_CST
12179 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12180 && TREE_INT_CST_HIGH (arg1) == -1)
12181 return fold_convert_loc (loc, type, negate_expr (arg0));
12183 /* Convert -A / -B to A / B when the type is signed and overflow is
12184 undefined. */
12185 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12186 && TREE_CODE (arg0) == NEGATE_EXPR
12187 && negate_expr_p (arg1))
12189 if (INTEGRAL_TYPE_P (type))
12190 fold_overflow_warning (("assuming signed overflow does not occur "
12191 "when distributing negation across "
12192 "division"),
12193 WARN_STRICT_OVERFLOW_MISC);
12194 return fold_build2_loc (loc, code, type,
12195 fold_convert_loc (loc, type,
12196 TREE_OPERAND (arg0, 0)),
12197 fold_convert_loc (loc, type,
12198 negate_expr (arg1)));
12200 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12201 && TREE_CODE (arg1) == NEGATE_EXPR
12202 && negate_expr_p (arg0))
12204 if (INTEGRAL_TYPE_P (type))
12205 fold_overflow_warning (("assuming signed overflow does not occur "
12206 "when distributing negation across "
12207 "division"),
12208 WARN_STRICT_OVERFLOW_MISC);
12209 return fold_build2_loc (loc, code, type,
12210 fold_convert_loc (loc, type,
12211 negate_expr (arg0)),
12212 fold_convert_loc (loc, type,
12213 TREE_OPERAND (arg1, 0)));
12216 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12217 operation, EXACT_DIV_EXPR.
12219 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12220 At one time others generated faster code; it's not clear whether they
12221 still do after the last round of changes to the DIV code in expmed.c. */
12222 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12223 && multiple_of_p (type, arg0, arg1))
12224 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12226 strict_overflow_p = false;
12227 if (TREE_CODE (arg1) == INTEGER_CST
12228 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12229 &strict_overflow_p)))
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not occur "
12233 "when simplifying division"),
12234 WARN_STRICT_OVERFLOW_MISC);
12235 return fold_convert_loc (loc, type, tem);
12238 return NULL_TREE;
12240 case CEIL_MOD_EXPR:
12241 case FLOOR_MOD_EXPR:
12242 case ROUND_MOD_EXPR:
12243 case TRUNC_MOD_EXPR:
12244 /* X % 1 is always zero, but be sure to preserve any side
12245 effects in X. */
12246 if (integer_onep (arg1))
12247 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12249 /* For X % 0, return X % 0 unchanged so that we can issue the
12250 proper warnings and errors. */
12251 if (integer_zerop (arg1))
12252 return NULL_TREE;
12254 /* 0 % X is always zero, but be sure to preserve any side
12255 effects in X. Place this after checking for X == 0. */
12256 if (integer_zerop (arg0))
12257 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12259 /* X % -1 is zero. */
12260 if (!TYPE_UNSIGNED (type)
12261 && TREE_CODE (arg1) == INTEGER_CST
12262 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12263 && TREE_INT_CST_HIGH (arg1) == -1)
12264 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12266 /* X % -C is the same as X % C. */
12267 if (code == TRUNC_MOD_EXPR
12268 && !TYPE_UNSIGNED (type)
12269 && TREE_CODE (arg1) == INTEGER_CST
12270 && !TREE_OVERFLOW (arg1)
12271 && TREE_INT_CST_HIGH (arg1) < 0
12272 && !TYPE_OVERFLOW_TRAPS (type)
12273 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12274 && !sign_bit_p (arg1, arg1))
12275 return fold_build2_loc (loc, code, type,
12276 fold_convert_loc (loc, type, arg0),
12277 fold_convert_loc (loc, type,
12278 negate_expr (arg1)));
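      /* Example: for signed X, "X % -16" folds to "X % 16"; with
         truncating division the remainder takes the sign of X,
         not of the divisor.  */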
12280 /* X % -Y is the same as X % Y. */
12281 if (code == TRUNC_MOD_EXPR
12282 && !TYPE_UNSIGNED (type)
12283 && TREE_CODE (arg1) == NEGATE_EXPR
12284 && !TYPE_OVERFLOW_TRAPS (type))
12285 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12286 fold_convert_loc (loc, type,
12287 TREE_OPERAND (arg1, 0)));
12289 strict_overflow_p = false;
12290 if (TREE_CODE (arg1) == INTEGER_CST
12291 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12292 &strict_overflow_p)))
12294 if (strict_overflow_p)
12295 fold_overflow_warning (("assuming signed overflow does not occur "
12296 "when simplifying modulus"),
12297 WARN_STRICT_OVERFLOW_MISC);
12298 return fold_convert_loc (loc, type, tem);
12301 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12302 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12303 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12304 && (TYPE_UNSIGNED (type)
12305 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12307 tree c = arg1;
12308 /* Also optimize A % (C << N) where C is a power of 2,
12309 to A & ((C << N) - 1). */
12310 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12311 c = TREE_OPERAND (arg1, 0);
12313 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12315 tree mask
12316 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12317 build_int_cst (TREE_TYPE (arg1), 1));
12318 if (strict_overflow_p)
12319 fold_overflow_warning (("assuming signed overflow does not "
12320 "occur when simplifying "
12321 "X % (power of two)"),
12322 WARN_STRICT_OVERFLOW_MISC);
12323 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12324 fold_convert_loc (loc, type, arg0),
12325 fold_convert_loc (loc, type, mask));
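	  /* Example: for unsigned X, "X % 8" becomes "X & 7", and
	     "X % (2 << N)" becomes "X & ((2 << N) - 1)".  */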
12329 return NULL_TREE;
12331 case LROTATE_EXPR:
12332 case RROTATE_EXPR:
12333 if (integer_all_onesp (arg0))
12334 return omit_one_operand_loc (loc, type, arg0, arg1);
12335 goto shift;
12337 case RSHIFT_EXPR:
12338 /* Optimize -1 >> x for arithmetic right shifts. */
12339 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12340 && tree_expr_nonnegative_p (arg1))
12341 return omit_one_operand_loc (loc, type, arg0, arg1);
12342 /* ... fall through ... */
12344 case LSHIFT_EXPR:
12345 shift:
12346 if (integer_zerop (arg1))
12347 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12348 if (integer_zerop (arg0))
12349 return omit_one_operand_loc (loc, type, arg0, arg1);
 12351 /* Since a negative shift count is not well-defined,
12352 don't try to compute it in the compiler. */
12353 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12354 return NULL_TREE;
12356 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12357 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12358 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12359 && host_integerp (TREE_OPERAND (arg0, 1), false)
12360 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12362 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12363 + TREE_INT_CST_LOW (arg1));
12365 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
 12366 being well-defined. */
12367 if (low >= TYPE_PRECISION (type))
12369 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12370 low = low % TYPE_PRECISION (type);
12371 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12372 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12373 TREE_OPERAND (arg0, 0));
12374 else
12375 low = TYPE_PRECISION (type) - 1;
12378 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12379 build_int_cst (type, low));
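	  /* Example: "(X << 3) << 4" becomes "X << 7".  If the combined
	     count reaches the precision, a left shift (or an unsigned
	     right shift) folds to 0, and rotate counts reduce modulo
	     the precision.  */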
12382 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12383 into x & ((unsigned)-1 >> c) for unsigned types. */
12384 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12385 || (TYPE_UNSIGNED (type)
12386 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12387 && host_integerp (arg1, false)
12388 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12389 && host_integerp (TREE_OPERAND (arg0, 1), false)
12390 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12392 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12393 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12394 tree lshift;
12395 tree arg00;
12397 if (low0 == low1)
12399 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12401 lshift = build_int_cst (type, -1);
12402 lshift = int_const_binop (code, lshift, arg1);
12404 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
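	  /* Example: for unsigned X, "(X >> 4) << 4" becomes
	     "X & (-1 << 4)", i.e. X with its low four bits cleared.  */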
12408 /* Rewrite an LROTATE_EXPR by a constant into an
12409 RROTATE_EXPR by a new constant. */
12410 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12412 tree tem = build_int_cst (TREE_TYPE (arg1),
12413 TYPE_PRECISION (type));
12414 tem = const_binop (MINUS_EXPR, tem, arg1);
12415 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
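	  /* Example: in a 32-bit type, a rotate left by 8 becomes a
	     rotate right by 24.  */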
12418 /* If we have a rotate of a bit operation with the rotate count and
12419 the second operand of the bit operation both constant,
12420 permute the two operations. */
12421 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12422 && (TREE_CODE (arg0) == BIT_AND_EXPR
12423 || TREE_CODE (arg0) == BIT_IOR_EXPR
12424 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12425 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12426 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12427 fold_build2_loc (loc, code, type,
12428 TREE_OPERAND (arg0, 0), arg1),
12429 fold_build2_loc (loc, code, type,
12430 TREE_OPERAND (arg0, 1), arg1));
12432 /* Two consecutive rotates adding up to the precision of the
12433 type can be ignored. */
12434 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12435 && TREE_CODE (arg0) == RROTATE_EXPR
12436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12437 && TREE_INT_CST_HIGH (arg1) == 0
12438 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12439 && ((TREE_INT_CST_LOW (arg1)
12440 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12441 == (unsigned int) TYPE_PRECISION (type)))
12442 return TREE_OPERAND (arg0, 0);
 12444 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1), and
 12445 (X & C2) >> C1 into (X >> C1) & (C2 >> C1),
12446 if the latter can be further optimized. */
12447 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12448 && TREE_CODE (arg0) == BIT_AND_EXPR
12449 && TREE_CODE (arg1) == INTEGER_CST
12450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12452 tree mask = fold_build2_loc (loc, code, type,
12453 fold_convert_loc (loc, type,
12454 TREE_OPERAND (arg0, 1)),
12455 arg1);
12456 tree shift = fold_build2_loc (loc, code, type,
12457 fold_convert_loc (loc, type,
12458 TREE_OPERAND (arg0, 0)),
12459 arg1);
12460 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12461 if (tem)
12462 return tem;
12465 return NULL_TREE;
12467 case MIN_EXPR:
12468 if (operand_equal_p (arg0, arg1, 0))
12469 return omit_one_operand_loc (loc, type, arg0, arg1);
12470 if (INTEGRAL_TYPE_P (type)
12471 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12472 return omit_one_operand_loc (loc, type, arg1, arg0);
12473 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12474 if (tem)
12475 return tem;
12476 goto associate;
12478 case MAX_EXPR:
12479 if (operand_equal_p (arg0, arg1, 0))
12480 return omit_one_operand_loc (loc, type, arg0, arg1);
12481 if (INTEGRAL_TYPE_P (type)
12482 && TYPE_MAX_VALUE (type)
12483 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12484 return omit_one_operand_loc (loc, type, arg1, arg0);
12485 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12486 if (tem)
12487 return tem;
12488 goto associate;
12490 case TRUTH_ANDIF_EXPR:
12491 /* Note that the operands of this must be ints
12492 and their values must be 0 or 1.
12493 ("true" is a fixed value perhaps depending on the language.) */
12494 /* If first arg is constant zero, return it. */
12495 if (integer_zerop (arg0))
12496 return fold_convert_loc (loc, type, arg0);
12497 case TRUTH_AND_EXPR:
12498 /* If either arg is constant true, drop it. */
12499 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12501 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12502 /* Preserve sequence points. */
12503 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12504 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12505 /* If second arg is constant zero, result is zero, but first arg
12506 must be evaluated. */
12507 if (integer_zerop (arg1))
12508 return omit_one_operand_loc (loc, type, arg1, arg0);
12509 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12510 case will be handled here. */
12511 if (integer_zerop (arg0))
12512 return omit_one_operand_loc (loc, type, arg0, arg1);
12514 /* !X && X is always false. */
12515 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12516 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12517 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12518 /* X && !X is always false. */
12519 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12521 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12523 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12524 means A >= Y && A != MAX, but in this case we know that
12525 A < X <= MAX. */
12527 if (!TREE_SIDE_EFFECTS (arg0)
12528 && !TREE_SIDE_EFFECTS (arg1))
12530 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12531 if (tem && !operand_equal_p (tem, arg0, 0))
12532 return fold_build2_loc (loc, code, type, tem, arg1);
12534 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12535 if (tem && !operand_equal_p (tem, arg1, 0))
12536 return fold_build2_loc (loc, code, type, arg0, tem);
12539 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12540 != NULL_TREE)
12541 return tem;
12543 return NULL_TREE;
12545 case TRUTH_ORIF_EXPR:
12546 /* Note that the operands of this must be ints
 12547 and their values must be 0 or 1.
12548 ("true" is a fixed value perhaps depending on the language.) */
12549 /* If first arg is constant true, return it. */
12550 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12551 return fold_convert_loc (loc, type, arg0);
12552 case TRUTH_OR_EXPR:
12553 /* If either arg is constant zero, drop it. */
12554 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12555 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12556 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12557 /* Preserve sequence points. */
12558 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12559 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12560 /* If second arg is constant true, result is true, but we must
12561 evaluate first arg. */
12562 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12563 return omit_one_operand_loc (loc, type, arg1, arg0);
12564 /* Likewise for first arg, but note this only occurs here for
12565 TRUTH_OR_EXPR. */
12566 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12567 return omit_one_operand_loc (loc, type, arg0, arg1);
12569 /* !X || X is always true. */
12570 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12571 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12572 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12573 /* X || !X is always true. */
12574 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12575 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12576 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12578 /* (X && !Y) || (!X && Y) is X ^ Y */
12579 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12580 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12582 tree a0, a1, l0, l1, n0, n1;
12584 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12585 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12587 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12588 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12590 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12591 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12593 if ((operand_equal_p (n0, a0, 0)
12594 && operand_equal_p (n1, a1, 0))
12595 || (operand_equal_p (n0, a1, 0)
12596 && operand_equal_p (n1, a0, 0)))
12597 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
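	  /* Both forms are true exactly when X and Y differ; the built
	     TRUTH_NOT of !Y folds back to Y, so the result is X ^ Y.  */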
12600 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12601 != NULL_TREE)
12602 return tem;
12604 return NULL_TREE;
12606 case TRUTH_XOR_EXPR:
12607 /* If the second arg is constant zero, drop it. */
12608 if (integer_zerop (arg1))
12609 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12610 /* If the second arg is constant true, this is a logical inversion. */
12611 if (integer_onep (arg1))
12613 /* Only call invert_truthvalue if operand is a truth value. */
12614 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12615 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12616 else
12617 tem = invert_truthvalue_loc (loc, arg0);
12618 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12620 /* Identical arguments cancel to zero. */
12621 if (operand_equal_p (arg0, arg1, 0))
12622 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12624 /* !X ^ X is always true. */
12625 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12626 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12627 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12629 /* X ^ !X is always true. */
12630 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12632 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12634 return NULL_TREE;
12636 case EQ_EXPR:
12637 case NE_EXPR:
12638 STRIP_NOPS (arg0);
12639 STRIP_NOPS (arg1);
12641 tem = fold_comparison (loc, code, type, op0, op1);
12642 if (tem != NULL_TREE)
12643 return tem;
12645 /* bool_var != 0 becomes bool_var. */
12646 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12647 && code == NE_EXPR)
12648 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12650 /* bool_var == 1 becomes bool_var. */
12651 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12652 && code == EQ_EXPR)
12653 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12655 /* bool_var != 1 becomes !bool_var. */
12656 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12657 && code == NE_EXPR)
12658 return fold_convert_loc (loc, type,
12659 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12660 TREE_TYPE (arg0), arg0));
12662 /* bool_var == 0 becomes !bool_var. */
12663 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12664 && code == EQ_EXPR)
12665 return fold_convert_loc (loc, type,
12666 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12667 TREE_TYPE (arg0), arg0));
12669 /* !exp != 0 becomes !exp */
12670 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12671 && code == NE_EXPR)
12672 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12674 /* If this is an equality comparison of the address of two non-weak,
12675 unaliased symbols neither of which are extern (since we do not
12676 have access to attributes for externs), then we know the result. */
12677 if (TREE_CODE (arg0) == ADDR_EXPR
12678 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12679 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12680 && ! lookup_attribute ("alias",
12681 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12682 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12683 && TREE_CODE (arg1) == ADDR_EXPR
12684 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12685 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12686 && ! lookup_attribute ("alias",
12687 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12688 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12690 /* We know that we're looking at the address of two
12691 non-weak, unaliased, static _DECL nodes.
12693 It is both wasteful and incorrect to call operand_equal_p
12694 to compare the two ADDR_EXPR nodes. It is wasteful in that
12695 all we need to do is test pointer equality for the arguments
12696 to the two ADDR_EXPR nodes. It is incorrect to use
12697 operand_equal_p as that function is NOT equivalent to a
12698 C equality test. It can in fact return false for two
12699 objects which would test as equal using the C equality
12700 operator. */
12701 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12702 return constant_boolean_node (equal
12703 ? code == EQ_EXPR : code != EQ_EXPR,
12704 type);
12707 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12708 a MINUS_EXPR of a constant, we can convert it into a comparison with
12709 a revised constant as long as no overflow occurs. */
12710 if (TREE_CODE (arg1) == INTEGER_CST
12711 && (TREE_CODE (arg0) == PLUS_EXPR
12712 || TREE_CODE (arg0) == MINUS_EXPR)
12713 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12714 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12715 ? MINUS_EXPR : PLUS_EXPR,
12716 fold_convert_loc (loc, TREE_TYPE (arg0),
12717 arg1),
12718 TREE_OPERAND (arg0, 1)))
12719 && !TREE_OVERFLOW (tem))
12720 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12722 /* Similarly for a NEGATE_EXPR. */
12723 if (TREE_CODE (arg0) == NEGATE_EXPR
12724 && TREE_CODE (arg1) == INTEGER_CST
12725 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12726 arg1)))
12727 && TREE_CODE (tem) == INTEGER_CST
12728 && !TREE_OVERFLOW (tem))
12729 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12731 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 && TREE_CODE (arg1) == INTEGER_CST
12734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12735 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12736 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12737 fold_convert_loc (loc,
12738 TREE_TYPE (arg0),
12739 arg1),
12740 TREE_OPERAND (arg0, 1)));
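      /* Example: "(X ^ 5) == 3" becomes "X == 6", since 5 ^ 3 == 6.  */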
12742 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12743 if ((TREE_CODE (arg0) == PLUS_EXPR
12744 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12745 || TREE_CODE (arg0) == MINUS_EXPR)
12746 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12747 0)),
12748 arg1, 0)
12749 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12750 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12752 tree val = TREE_OPERAND (arg0, 1);
12753 return omit_two_operands_loc (loc, type,
12754 fold_build2_loc (loc, code, type,
12755 val,
12756 build_int_cst (TREE_TYPE (val),
12757 0)),
12758 TREE_OPERAND (arg0, 0), arg1);
12761 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12762 if (TREE_CODE (arg0) == MINUS_EXPR
12763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12764 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12765 1)),
12766 arg1, 0)
12767 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12769 return omit_two_operands_loc (loc, type,
12770 code == NE_EXPR
12771 ? boolean_true_node : boolean_false_node,
12772 TREE_OPERAND (arg0, 1), arg1);
12775 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12776 for !=. Don't do this for ordered comparisons due to overflow. */
12777 if (TREE_CODE (arg0) == MINUS_EXPR
12778 && integer_zerop (arg1))
12779 return fold_build2_loc (loc, code, type,
12780 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12782 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12783 if (TREE_CODE (arg0) == ABS_EXPR
12784 && (integer_zerop (arg1) || real_zerop (arg1)))
12785 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12787 /* If this is an EQ or NE comparison with zero and ARG0 is
12788 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12789 two operations, but the latter can be done in one less insn
12790 on machines that have only two-operand insns or on which a
12791 constant cannot be the first operand. */
12792 if (TREE_CODE (arg0) == BIT_AND_EXPR
12793 && integer_zerop (arg1))
12795 tree arg00 = TREE_OPERAND (arg0, 0);
12796 tree arg01 = TREE_OPERAND (arg0, 1);
12797 if (TREE_CODE (arg00) == LSHIFT_EXPR
12798 && integer_onep (TREE_OPERAND (arg00, 0)))
12800 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12801 arg01, TREE_OPERAND (arg00, 1));
12802 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12803 build_int_cst (TREE_TYPE (arg0), 1));
12804 return fold_build2_loc (loc, code, type,
12805 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12806 arg1);
12808 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12809 && integer_onep (TREE_OPERAND (arg01, 0)))
12811 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12812 arg00, TREE_OPERAND (arg01, 1));
12813 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12814 build_int_cst (TREE_TYPE (arg0), 1));
12815 return fold_build2_loc (loc, code, type,
12816 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12817 arg1);
12821 /* If this is an NE or EQ comparison of zero against the result of a
12822 signed MOD operation whose second operand is a power of 2, make
12823 the MOD operation unsigned since it is simpler and equivalent. */
12824 if (integer_zerop (arg1)
12825 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12826 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12827 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12828 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12829 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12830 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12832 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12833 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12834 fold_convert_loc (loc, newtype,
12835 TREE_OPERAND (arg0, 0)),
12836 fold_convert_loc (loc, newtype,
12837 TREE_OPERAND (arg0, 1)));
12839 return fold_build2_loc (loc, code, type, newmod,
12840 fold_convert_loc (loc, newtype, arg1));
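	  /* Example: for signed X, "(X % 4) == 0" becomes
	     "((unsigned) X % 4U) == 0"; divisibility by a power of two
	     does not depend on the sign.  */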
12843 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12844 C1 is a valid shift constant, and C2 is a power of two, i.e.
12845 a single bit. */
12846 if (TREE_CODE (arg0) == BIT_AND_EXPR
12847 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12848 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12849 == INTEGER_CST
12850 && integer_pow2p (TREE_OPERAND (arg0, 1))
12851 && integer_zerop (arg1))
12853 tree itype = TREE_TYPE (arg0);
12854 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12855 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12857 /* Check for a valid shift count. */
12858 if (TREE_INT_CST_HIGH (arg001) == 0
12859 && TREE_INT_CST_LOW (arg001) < prec)
12861 tree arg01 = TREE_OPERAND (arg0, 1);
12862 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12863 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12864 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12865 can be rewritten as (X & (C2 << C1)) != 0. */
12866 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12868 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12869 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12870 return fold_build2_loc (loc, code, type, tem,
12871 fold_convert_loc (loc, itype, arg1));
12873 /* Otherwise, for signed (arithmetic) shifts,
12874 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12875 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12876 else if (!TYPE_UNSIGNED (itype))
12877 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12878 arg000, build_int_cst (itype, 0));
 12879 /* Otherwise, for unsigned (logical) shifts,
12880 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12881 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12882 else
12883 return omit_one_operand_loc (loc, type,
12884 code == EQ_EXPR ? integer_one_node
12885 : integer_zero_node,
12886 arg000);
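	      /* Example: for signed 32-bit X, "((X >> 31) & 2) != 0"
	         cannot use the mask form (2 << 31 overflows), but the
	         arithmetic shift smears the sign bit, so it folds to
	         "X < 0".  */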
12890 /* If we have (A & C) == C where C is a power of 2, convert this into
12891 (A & C) != 0. Similarly for NE_EXPR. */
12892 if (TREE_CODE (arg0) == BIT_AND_EXPR
12893 && integer_pow2p (TREE_OPERAND (arg0, 1))
12894 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12895 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12896 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12897 integer_zero_node));
12899 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12900 bit, then fold the expression into A < 0 or A >= 0. */
12901 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12902 if (tem)
12903 return tem;
12905 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12906 Similarly for NE_EXPR. */
12907 if (TREE_CODE (arg0) == BIT_AND_EXPR
12908 && TREE_CODE (arg1) == INTEGER_CST
12909 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12911 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12912 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12913 TREE_OPERAND (arg0, 1));
12914 tree dandnotc
12915 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12916 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12917 notc);
12918 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12919 if (integer_nonzerop (dandnotc))
12920 return omit_one_operand_loc (loc, type, rslt, arg0);
12923 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12924 Similarly for NE_EXPR. */
12925 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12926 && TREE_CODE (arg1) == INTEGER_CST
12927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12929 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12930 tree candnotd
12931 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12932 TREE_OPERAND (arg0, 1),
12933 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12934 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12935 if (integer_nonzerop (candnotd))
12936 return omit_one_operand_loc (loc, type, rslt, arg0);
12939 /* If this is a comparison of a field, we may be able to simplify it. */
12940 if ((TREE_CODE (arg0) == COMPONENT_REF
12941 || TREE_CODE (arg0) == BIT_FIELD_REF)
12942 /* Handle the constant case even without -O
12943 to make sure the warnings are given. */
12944 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12946 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12947 if (t1)
12948 return t1;
12951 /* Optimize comparisons of strlen vs zero to a compare of the
12952 first character of the string vs zero. To wit,
12953 strlen(ptr) == 0 => *ptr == 0
12954 strlen(ptr) != 0 => *ptr != 0
12955 Other cases should reduce to one of these two (or a constant)
12956 due to the return value of strlen being unsigned. */
12957 if (TREE_CODE (arg0) == CALL_EXPR
12958 && integer_zerop (arg1))
12960 tree fndecl = get_callee_fndecl (arg0);
12962 if (fndecl
12963 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12964 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12965 && call_expr_nargs (arg0) == 1
12966 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12968 tree iref = build_fold_indirect_ref_loc (loc,
12969 CALL_EXPR_ARG (arg0, 0));
12970 return fold_build2_loc (loc, code, type, iref,
12971 build_int_cst (TREE_TYPE (iref), 0));
12975 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12976 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12977 if (TREE_CODE (arg0) == RSHIFT_EXPR
12978 && integer_zerop (arg1)
12979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12981 tree arg00 = TREE_OPERAND (arg0, 0);
12982 tree arg01 = TREE_OPERAND (arg0, 1);
12983 tree itype = TREE_TYPE (arg00);
12984 if (TREE_INT_CST_HIGH (arg01) == 0
12985 && TREE_INT_CST_LOW (arg01)
12986 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12988 if (TYPE_UNSIGNED (itype))
12990 itype = signed_type_for (itype);
12991 arg00 = fold_convert_loc (loc, itype, arg00);
12993 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12994 type, arg00, build_zero_cst (itype));
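	  /* Example: for 32-bit X, "(X >> 31) != 0" folds to "X < 0"
	     and "(X >> 31) == 0" to "X >= 0", converting X to a signed
	     type first when it is unsigned.  */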
12998 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12999 if (integer_zerop (arg1)
13000 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13002 TREE_OPERAND (arg0, 1));
13004 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13005 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13006 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13007 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13008 build_zero_cst (TREE_TYPE (arg0)));
13009 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13010 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13012 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13013 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13014 build_zero_cst (TREE_TYPE (arg0)));
13016 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13017 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13018 && TREE_CODE (arg1) == INTEGER_CST
13019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13020 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13021 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13022 TREE_OPERAND (arg0, 1), arg1));
13024 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13025 (X & C) == 0 when C is a single bit. */
13026 if (TREE_CODE (arg0) == BIT_AND_EXPR
13027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13028 && integer_zerop (arg1)
13029 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13031 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13032 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13033 TREE_OPERAND (arg0, 1));
13034 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13035 type, tem,
13036 fold_convert_loc (loc, TREE_TYPE (arg0),
13037 arg1));
13040 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13041 constant C is a power of two, i.e. a single bit. */
13042 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13044 && integer_zerop (arg1)
13045 && integer_pow2p (TREE_OPERAND (arg0, 1))
13046 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13047 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13049 tree arg00 = TREE_OPERAND (arg0, 0);
13050 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13051 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13054 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 13055 when C is a power of two, i.e. a single bit. */
13056 if (TREE_CODE (arg0) == BIT_AND_EXPR
13057 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13058 && integer_zerop (arg1)
13059 && integer_pow2p (TREE_OPERAND (arg0, 1))
13060 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13061 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13063 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13064 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13065 arg000, TREE_OPERAND (arg0, 1));
13066 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13067 tem, build_int_cst (TREE_TYPE (tem), 0));
13070 if (integer_zerop (arg1)
13071 && tree_expr_nonzero_p (arg0))
13073 tree res = constant_boolean_node (code==NE_EXPR, type);
13074 return omit_one_operand_loc (loc, type, res, arg0);
13077 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13078 if (TREE_CODE (arg0) == NEGATE_EXPR
13079 && TREE_CODE (arg1) == NEGATE_EXPR)
13080 return fold_build2_loc (loc, code, type,
13081 TREE_OPERAND (arg0, 0),
13082 fold_convert_loc (loc, TREE_TYPE (arg0),
13083 TREE_OPERAND (arg1, 0)));
 13085 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13086 if (TREE_CODE (arg0) == BIT_AND_EXPR
13087 && TREE_CODE (arg1) == BIT_AND_EXPR)
13089 tree arg00 = TREE_OPERAND (arg0, 0);
13090 tree arg01 = TREE_OPERAND (arg0, 1);
13091 tree arg10 = TREE_OPERAND (arg1, 0);
13092 tree arg11 = TREE_OPERAND (arg1, 1);
13093 tree itype = TREE_TYPE (arg0);
13095 if (operand_equal_p (arg01, arg11, 0))
13096 return fold_build2_loc (loc, code, type,
13097 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13098 fold_build2_loc (loc,
13099 BIT_XOR_EXPR, itype,
13100 arg00, arg10),
13101 arg01),
13102 build_zero_cst (itype));
13104 if (operand_equal_p (arg01, arg10, 0))
13105 return fold_build2_loc (loc, code, type,
13106 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13107 fold_build2_loc (loc,
13108 BIT_XOR_EXPR, itype,
13109 arg00, arg11),
13110 arg01),
13111 build_zero_cst (itype));
13113 if (operand_equal_p (arg00, arg11, 0))
13114 return fold_build2_loc (loc, code, type,
13115 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13116 fold_build2_loc (loc,
13117 BIT_XOR_EXPR, itype,
13118 arg01, arg10),
13119 arg00),
13120 build_zero_cst (itype));
13122 if (operand_equal_p (arg00, arg10, 0))
13123 return fold_build2_loc (loc, code, type,
13124 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13125 fold_build2_loc (loc,
13126 BIT_XOR_EXPR, itype,
13127 arg01, arg11),
13128 arg00),
13129 build_zero_cst (itype));
13132 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13133 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13135 tree arg00 = TREE_OPERAND (arg0, 0);
13136 tree arg01 = TREE_OPERAND (arg0, 1);
13137 tree arg10 = TREE_OPERAND (arg1, 0);
13138 tree arg11 = TREE_OPERAND (arg1, 1);
13139 tree itype = TREE_TYPE (arg0);
13141 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13142 operand_equal_p guarantees no side-effects so we don't need
13143 to use omit_one_operand on Z. */
13144 if (operand_equal_p (arg01, arg11, 0))
13145 return fold_build2_loc (loc, code, type, arg00,
13146 fold_convert_loc (loc, TREE_TYPE (arg00),
13147 arg10));
13148 if (operand_equal_p (arg01, arg10, 0))
13149 return fold_build2_loc (loc, code, type, arg00,
13150 fold_convert_loc (loc, TREE_TYPE (arg00),
13151 arg11));
13152 if (operand_equal_p (arg00, arg11, 0))
13153 return fold_build2_loc (loc, code, type, arg01,
13154 fold_convert_loc (loc, TREE_TYPE (arg01),
13155 arg10));
13156 if (operand_equal_p (arg00, arg10, 0))
13157 return fold_build2_loc (loc, code, type, arg01,
13158 fold_convert_loc (loc, TREE_TYPE (arg01),
13159 arg11));
13161 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13162 if (TREE_CODE (arg01) == INTEGER_CST
13163 && TREE_CODE (arg11) == INTEGER_CST)
13165 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13166 fold_convert_loc (loc, itype, arg11));
13167 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13168 return fold_build2_loc (loc, code, type, tem,
13169 fold_convert_loc (loc, itype, arg10));
13173 /* Attempt to simplify equality/inequality comparisons of complex
13174 values. Only lower the comparison if the result is known or
13175 can be simplified to a single scalar comparison. */
13176 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13177 || TREE_CODE (arg0) == COMPLEX_CST)
13178 && (TREE_CODE (arg1) == COMPLEX_EXPR
13179 || TREE_CODE (arg1) == COMPLEX_CST))
13181 tree real0, imag0, real1, imag1;
13182 tree rcond, icond;
13184 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13186 real0 = TREE_OPERAND (arg0, 0);
13187 imag0 = TREE_OPERAND (arg0, 1);
13189 else
13191 real0 = TREE_REALPART (arg0);
13192 imag0 = TREE_IMAGPART (arg0);
13195 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13197 real1 = TREE_OPERAND (arg1, 0);
13198 imag1 = TREE_OPERAND (arg1, 1);
13200 else
13202 real1 = TREE_REALPART (arg1);
13203 imag1 = TREE_IMAGPART (arg1);
13206 rcond = fold_binary_loc (loc, code, type, real0, real1);
13207 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13209 if (integer_zerop (rcond))
13211 if (code == EQ_EXPR)
13212 return omit_two_operands_loc (loc, type, boolean_false_node,
13213 imag0, imag1);
13214 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13216 else
13218 if (code == NE_EXPR)
13219 return omit_two_operands_loc (loc, type, boolean_true_node,
13220 imag0, imag1);
13221 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13225 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13226 if (icond && TREE_CODE (icond) == INTEGER_CST)
13228 if (integer_zerop (icond))
13230 if (code == EQ_EXPR)
13231 return omit_two_operands_loc (loc, type, boolean_false_node,
13232 real0, real1);
13233 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13235 else
13237 if (code == NE_EXPR)
13238 return omit_two_operands_loc (loc, type, boolean_true_node,
13239 real0, real1);
13240 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13245 return NULL_TREE;
13247 case LT_EXPR:
13248 case GT_EXPR:
13249 case LE_EXPR:
13250 case GE_EXPR:
13251 tem = fold_comparison (loc, code, type, op0, op1);
13252 if (tem != NULL_TREE)
13253 return tem;
13255 /* Transform comparisons of the form X +- C CMP X. */
13256 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13257 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13258 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13259 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13260 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13261 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13263 tree arg01 = TREE_OPERAND (arg0, 1);
13264 enum tree_code code0 = TREE_CODE (arg0);
13265 int is_positive;
13267 if (TREE_CODE (arg01) == REAL_CST)
13268 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13269 else
13270 is_positive = tree_int_cst_sgn (arg01);
13272 /* (X - c) > X becomes false. */
13273 if (code == GT_EXPR
13274 && ((code0 == MINUS_EXPR && is_positive >= 0)
13275 || (code0 == PLUS_EXPR && is_positive <= 0)))
13277 if (TREE_CODE (arg01) == INTEGER_CST
13278 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does not "
13280 "occur when assuming that (X - c) > X "
13281 "is always false"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (0, type);
13286 /* Likewise (X + c) < X becomes false. */
13287 if (code == LT_EXPR
13288 && ((code0 == PLUS_EXPR && is_positive >= 0)
13289 || (code0 == MINUS_EXPR && is_positive <= 0)))
13291 if (TREE_CODE (arg01) == INTEGER_CST
13292 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13293 fold_overflow_warning (("assuming signed overflow does not "
13294 "occur when assuming that "
13295 "(X + c) < X is always false"),
13296 WARN_STRICT_OVERFLOW_ALL);
13297 return constant_boolean_node (0, type);
13300 /* Convert (X - c) <= X to true. */
13301 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13302 && code == LE_EXPR
13303 && ((code0 == MINUS_EXPR && is_positive >= 0)
13304 || (code0 == PLUS_EXPR && is_positive <= 0)))
13306 if (TREE_CODE (arg01) == INTEGER_CST
13307 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13308 fold_overflow_warning (("assuming signed overflow does not "
13309 "occur when assuming that "
13310 "(X - c) <= X is always true"),
13311 WARN_STRICT_OVERFLOW_ALL);
13312 return constant_boolean_node (1, type);
13315 /* Convert (X + c) >= X to true. */
13316 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13317 && code == GE_EXPR
13318 && ((code0 == PLUS_EXPR && is_positive >= 0)
13319 || (code0 == MINUS_EXPR && is_positive <= 0)))
13321 if (TREE_CODE (arg01) == INTEGER_CST
13322 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13323 fold_overflow_warning (("assuming signed overflow does not "
13324 "occur when assuming that "
13325 "(X + c) >= X is always true"),
13326 WARN_STRICT_OVERFLOW_ALL);
13327 return constant_boolean_node (1, type);
13330 if (TREE_CODE (arg01) == INTEGER_CST)
13332 /* Convert X + c > X and X - c < X to true for integers. */
13333 if (code == GT_EXPR
13334 && ((code0 == PLUS_EXPR && is_positive > 0)
13335 || (code0 == MINUS_EXPR && is_positive < 0)))
13337 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13338 fold_overflow_warning (("assuming signed overflow does "
13339 "not occur when assuming that "
13340 "(X + c) > X is always true"),
13341 WARN_STRICT_OVERFLOW_ALL);
13342 return constant_boolean_node (1, type);
13345 if (code == LT_EXPR
13346 && ((code0 == MINUS_EXPR && is_positive > 0)
13347 || (code0 == PLUS_EXPR && is_positive < 0)))
13349 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13350 fold_overflow_warning (("assuming signed overflow does "
13351 "not occur when assuming that "
13352 "(X - c) < X is always true"),
13353 WARN_STRICT_OVERFLOW_ALL);
13354 return constant_boolean_node (1, type);
13357 /* Convert X + c <= X and X - c >= X to false for integers. */
13358 if (code == LE_EXPR
13359 && ((code0 == PLUS_EXPR && is_positive > 0)
13360 || (code0 == MINUS_EXPR && is_positive < 0)))
13362 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13363 fold_overflow_warning (("assuming signed overflow does "
13364 "not occur when assuming that "
13365 "(X + c) <= X is always false"),
13366 WARN_STRICT_OVERFLOW_ALL);
13367 return constant_boolean_node (0, type);
13370 if (code == GE_EXPR
13371 && ((code0 == MINUS_EXPR && is_positive > 0)
13372 || (code0 == PLUS_EXPR && is_positive < 0)))
13374 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13375 fold_overflow_warning (("assuming signed overflow does "
13376 "not occur when assuming that "
13377 "(X - c) >= X is always false"),
13378 WARN_STRICT_OVERFLOW_ALL);
13379 return constant_boolean_node (0, type);
13384 /* Comparisons with the highest or lowest possible integer of
13385 the specified precision will have known values. */
13387 tree arg1_type = TREE_TYPE (arg1);
13388 unsigned int width = TYPE_PRECISION (arg1_type);
13390 if (TREE_CODE (arg1) == INTEGER_CST
13391 && width <= HOST_BITS_PER_DOUBLE_INT
13392 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13394 HOST_WIDE_INT signed_max_hi;
13395 unsigned HOST_WIDE_INT signed_max_lo;
13396 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13398 if (width <= HOST_BITS_PER_WIDE_INT)
13400 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13401 - 1;
13402 signed_max_hi = 0;
13403 max_hi = 0;
13405 if (TYPE_UNSIGNED (arg1_type))
13407 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13408 min_lo = 0;
13409 min_hi = 0;
13411 else
13413 max_lo = signed_max_lo;
13414 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13415 min_hi = -1;
13418 else
13420 width -= HOST_BITS_PER_WIDE_INT;
13421 signed_max_lo = -1;
13422 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13423 - 1;
13424 max_lo = -1;
13425 min_lo = 0;
13427 if (TYPE_UNSIGNED (arg1_type))
13429 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13430 min_hi = 0;
13432 else
13434 max_hi = signed_max_hi;
13435 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13439 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13440 && TREE_INT_CST_LOW (arg1) == max_lo)
13441 switch (code)
13443 case GT_EXPR:
13444 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13446 case GE_EXPR:
13447 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13449 case LE_EXPR:
13450 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13452 case LT_EXPR:
13453 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13455 /* The GE_EXPR and LT_EXPR cases above are not normally
13456 reached because of previous transformations. */
13458 default:
13459 break;
13461 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13462 == max_hi
13463 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13464 switch (code)
13466 case GT_EXPR:
13467 arg1 = const_binop (PLUS_EXPR, arg1,
13468 build_int_cst (TREE_TYPE (arg1), 1));
13469 return fold_build2_loc (loc, EQ_EXPR, type,
13470 fold_convert_loc (loc,
13471 TREE_TYPE (arg1), arg0),
13472 arg1);
13473 case LE_EXPR:
13474 arg1 = const_binop (PLUS_EXPR, arg1,
13475 build_int_cst (TREE_TYPE (arg1), 1));
13476 return fold_build2_loc (loc, NE_EXPR, type,
13477 fold_convert_loc (loc, TREE_TYPE (arg1),
13478 arg0),
13479 arg1);
13480 default:
13481 break;
13483 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13484 == min_hi
13485 && TREE_INT_CST_LOW (arg1) == min_lo)
13486 switch (code)
13488 case LT_EXPR:
13489 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13491 case LE_EXPR:
13492 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13494 case GE_EXPR:
13495 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13497 case GT_EXPR:
13498 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13500 default:
13501 break;
13503 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13504 == min_hi
13505 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13506 switch (code)
13508 case GE_EXPR:
13509 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13510 return fold_build2_loc (loc, NE_EXPR, type,
13511 fold_convert_loc (loc,
13512 TREE_TYPE (arg1), arg0),
13513 arg1);
13514 case LT_EXPR:
13515 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13516 return fold_build2_loc (loc, EQ_EXPR, type,
13517 fold_convert_loc (loc, TREE_TYPE (arg1),
13518 arg0),
13519 arg1);
13520 default:
13521 break;
13524 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13525 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13526 && TYPE_UNSIGNED (arg1_type)
13527 /* We will flip the signedness of the comparison operator
13528 associated with the mode of arg1, so the sign bit is
13529 specified by this mode. Check that arg1 is the signed
13530 max associated with this sign bit. */
13531 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13532 /* signed_type does not work on pointer types. */
13533 && INTEGRAL_TYPE_P (arg1_type))
13535 /* The following case also applies to X < signed_max+1
 13536 and X >= signed_max+1 because of previous transformations. */
13537 if (code == LE_EXPR || code == GT_EXPR)
13539 tree st;
13540 st = signed_type_for (TREE_TYPE (arg1));
13541 return fold_build2_loc (loc,
13542 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13543 type, fold_convert_loc (loc, st, arg0),
13544 build_int_cst (st, 0));
13550 /* If we are comparing an ABS_EXPR with a constant, we can
13551 convert all the cases into explicit comparisons, but they may
13552 well not be faster than doing the ABS and one comparison.
13553 But ABS (X) <= C is a range comparison, which becomes a subtraction
13554 and a comparison, and is probably faster. */
13555 if (code == LE_EXPR
13556 && TREE_CODE (arg1) == INTEGER_CST
13557 && TREE_CODE (arg0) == ABS_EXPR
13558 && ! TREE_SIDE_EFFECTS (arg0)
13559 && (0 != (tem = negate_expr (arg1)))
13560 && TREE_CODE (tem) == INTEGER_CST
13561 && !TREE_OVERFLOW (tem))
13562 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13563 build2 (GE_EXPR, type,
13564 TREE_OPERAND (arg0, 0), tem),
13565 build2 (LE_EXPR, type,
13566 TREE_OPERAND (arg0, 0), arg1));
13568 /* Convert ABS_EXPR<x> >= 0 to true. */
13569 strict_overflow_p = false;
13570 if (code == GE_EXPR
13571 && (integer_zerop (arg1)
13572 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13573 && real_zerop (arg1)))
13574 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13576 if (strict_overflow_p)
13577 fold_overflow_warning (("assuming signed overflow does not occur "
13578 "when simplifying comparison of "
13579 "absolute value and zero"),
13580 WARN_STRICT_OVERFLOW_CONDITIONAL);
13581 return omit_one_operand_loc (loc, type,
13582 constant_boolean_node (true, type),
13583 arg0);
13586 /* Convert ABS_EXPR<x> < 0 to false. */
13587 strict_overflow_p = false;
13588 if (code == LT_EXPR
13589 && (integer_zerop (arg1) || real_zerop (arg1))
13590 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13592 if (strict_overflow_p)
13593 fold_overflow_warning (("assuming signed overflow does not occur "
13594 "when simplifying comparison of "
13595 "absolute value and zero"),
13596 WARN_STRICT_OVERFLOW_CONDITIONAL);
13597 return omit_one_operand_loc (loc, type,
13598 constant_boolean_node (false, type),
13599 arg0);
13602 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13603 and similarly for >= into !=. */
13604 if ((code == LT_EXPR || code == GE_EXPR)
13605 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13606 && TREE_CODE (arg1) == LSHIFT_EXPR
13607 && integer_onep (TREE_OPERAND (arg1, 0)))
13608 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13609 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13610 TREE_OPERAND (arg1, 1)),
13611 build_zero_cst (TREE_TYPE (arg0)));
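      /* Example: for unsigned X, "X < (1 << Y)" becomes
         "(X >> Y) == 0", and "X >= (1 << Y)" becomes "(X >> Y) != 0".  */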
13613 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13614 otherwise Y might be >= # of bits in X's type and thus e.g.
 13615 (unsigned char) (1 << Y) for Y == 15 might be 0.
13616 If the cast is widening, then 1 << Y should have unsigned type,
13617 otherwise if Y is number of bits in the signed shift type minus 1,
 13618 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
 13619 Y == 31 might be 0xffffffff80000000. */
13620 if ((code == LT_EXPR || code == GE_EXPR)
13621 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13622 && CONVERT_EXPR_P (arg1)
13623 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13624 && (TYPE_PRECISION (TREE_TYPE (arg1))
13625 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13626 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13627 || (TYPE_PRECISION (TREE_TYPE (arg1))
13628 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13629 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13631 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13632 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13633 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13634 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13635 build_zero_cst (TREE_TYPE (arg0)));
13638 return NULL_TREE;
13640 case UNORDERED_EXPR:
13641 case ORDERED_EXPR:
13642 case UNLT_EXPR:
13643 case UNLE_EXPR:
13644 case UNGT_EXPR:
13645 case UNGE_EXPR:
13646 case UNEQ_EXPR:
13647 case LTGT_EXPR:
13648 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13650 t1 = fold_relational_const (code, type, arg0, arg1);
13651 if (t1 != NULL_TREE)
13652 return t1;
13655 /* If the first operand is NaN, the result is constant. */
13656 if (TREE_CODE (arg0) == REAL_CST
13657 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13658 && (code != LTGT_EXPR || ! flag_trapping_math))
13660 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13661 ? integer_zero_node
13662 : integer_one_node;
13663 return omit_one_operand_loc (loc, type, t1, arg1);
13666 /* If the second operand is NaN, the result is constant. */
13667 if (TREE_CODE (arg1) == REAL_CST
13668 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13669 && (code != LTGT_EXPR || ! flag_trapping_math))
13671 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13672 ? integer_zero_node
13673 : integer_one_node;
13674 return omit_one_operand_loc (loc, type, t1, arg0);
13677 /* Simplify unordered comparison of something with itself. */
13678 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13679 && operand_equal_p (arg0, arg1, 0))
13680 return constant_boolean_node (1, type);
13682 if (code == LTGT_EXPR
13683 && !flag_trapping_math
13684 && operand_equal_p (arg0, arg1, 0))
13685 return constant_boolean_node (0, type);
13687 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13689 tree targ0 = strip_float_extensions (arg0);
13690 tree targ1 = strip_float_extensions (arg1);
13691 tree newtype = TREE_TYPE (targ0);
13693 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13694 newtype = TREE_TYPE (targ1);
13696 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13697 return fold_build2_loc (loc, code, type,
13698 fold_convert_loc (loc, newtype, targ0),
13699 fold_convert_loc (loc, newtype, targ1));
13702 return NULL_TREE;
13704 case COMPOUND_EXPR:
13705 /* When pedantic, a compound expression can be neither an lvalue
13706 nor an integer constant expression. */
13707 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13708 return NULL_TREE;
 13709 /* Don't let (0, 0) be a null pointer constant. */
13710 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13711 : fold_convert_loc (loc, type, arg1);
13712 return pedantic_non_lvalue_loc (loc, tem);
13714 case COMPLEX_EXPR:
13715 if ((TREE_CODE (arg0) == REAL_CST
13716 && TREE_CODE (arg1) == REAL_CST)
13717 || (TREE_CODE (arg0) == INTEGER_CST
13718 && TREE_CODE (arg1) == INTEGER_CST))
13719 return build_complex (type, arg0, arg1);
13720 if (TREE_CODE (arg0) == REALPART_EXPR
13721 && TREE_CODE (arg1) == IMAGPART_EXPR
13722 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13723 && operand_equal_p (TREE_OPERAND (arg0, 0),
13724 TREE_OPERAND (arg1, 0), 0))
13725 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13726 TREE_OPERAND (arg1, 0));
13727 return NULL_TREE;
13729 case ASSERT_EXPR:
13730 /* An ASSERT_EXPR should never be passed to fold_binary. */
13731 gcc_unreachable ();
13733 case VEC_PACK_TRUNC_EXPR:
13734 case VEC_PACK_FIX_TRUNC_EXPR:
13736 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13737 tree *elts;
13739 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13740 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13741 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13742 return NULL_TREE;
13744 elts = XALLOCAVEC (tree, nelts);
13745 if (!vec_cst_ctor_to_array (arg0, elts)
13746 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13747 return NULL_TREE;
13749 for (i = 0; i < nelts; i++)
13751 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13752 ? NOP_EXPR : FIX_TRUNC_EXPR,
13753 TREE_TYPE (type), elts[i]);
13754 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13755 return NULL_TREE;
13758 return build_vector (type, elts);
13761 case VEC_WIDEN_MULT_LO_EXPR:
13762 case VEC_WIDEN_MULT_HI_EXPR:
13763 case VEC_WIDEN_MULT_EVEN_EXPR:
13764 case VEC_WIDEN_MULT_ODD_EXPR:
13766 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13767 unsigned int out, ofs, scale;
13768 tree *elts;
13770 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13771 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13772 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13773 return NULL_TREE;
13775 elts = XALLOCAVEC (tree, nelts * 4);
13776 if (!vec_cst_ctor_to_array (arg0, elts)
13777 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13778 return NULL_TREE;
13780 if (code == VEC_WIDEN_MULT_LO_EXPR)
13781 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13782 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13783 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13784 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13785 scale = 1, ofs = 0;
13786 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13787 scale = 1, ofs = 1;
13789 for (out = 0; out < nelts; out++)
13791 unsigned int in1 = (out << scale) + ofs;
13792 unsigned int in2 = in1 + nelts * 2;
13793 tree t1, t2;
13795 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13796 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13798 if (t1 == NULL_TREE || t2 == NULL_TREE)
13799 return NULL_TREE;
13800 elts[out] = const_binop (MULT_EXPR, t1, t2);
13801 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13802 return NULL_TREE;
13805 return build_vector (type, elts);
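      /* Example: with four output lanes, VEC_WIDEN_MULT_EVEN_EXPR
         multiplies input lanes 0, 2, 4 and 6 of the two operands
         pairwise, while the LO/HI variants take a contiguous half,
         with the offset adjusted for endianness.  */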
13808 default:
13809 return NULL_TREE;
13810 } /* switch (code) */
13813 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13814 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13815 of GOTO_EXPR. */
13817 static tree
13818 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13820 switch (TREE_CODE (*tp))
13822 case LABEL_EXPR:
13823 return *tp;
13825 case GOTO_EXPR:
13826 *walk_subtrees = 0;
13828 /* ... fall through ... */
13830 default:
13831 return NULL_TREE;
13835 /* Return whether the sub-tree ST contains a label which is accessible from
13836 outside the sub-tree. */
13838 static bool
13839 contains_label_p (tree st)
13841 return
13842 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13845 /* Fold a ternary expression of code CODE and type TYPE with operands
13846 OP0, OP1, and OP2. Return the folded expression if folding is
13847 successful. Otherwise, return NULL_TREE. */
13849 tree
13850 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13851 tree op0, tree op1, tree op2)
13853 tree tem;
13854 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13855 enum tree_code_class kind = TREE_CODE_CLASS (code);
13857 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13858 && TREE_CODE_LENGTH (code) == 3);
13860 /* Strip any conversions that don't change the mode. This is safe
13861 for every expression, except for a comparison expression because
13862 its signedness is derived from its operands. So, in the latter
13863 case, only strip conversions that don't change the signedness.
13865 Note that this is done as an internal manipulation within the
13866 constant folder, in order to find the simplest representation of
13867 the arguments so that their form can be studied. In any case,
13868 the appropriate type conversions should be put back in the tree
13869 that will get out of the constant folder. */
13870 if (op0)
13872 arg0 = op0;
13873 STRIP_NOPS (arg0);
13876 if (op1)
13878 arg1 = op1;
13879 STRIP_NOPS (arg1);
13882 if (op2)
13884 arg2 = op2;
13885 STRIP_NOPS (arg2);
13888 switch (code)
13890 case COMPONENT_REF:
13891 if (TREE_CODE (arg0) == CONSTRUCTOR
13892 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13894 unsigned HOST_WIDE_INT idx;
13895 tree field, value;
13896 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13897 if (field == arg1)
13898 return value;
13900 return NULL_TREE;
13902 case COND_EXPR:
13903 case VEC_COND_EXPR:
13904 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13905 so all simple results must be passed through pedantic_non_lvalue. */
13906 if (TREE_CODE (arg0) == INTEGER_CST)
13908 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13909 tem = integer_zerop (arg0) ? op2 : op1;
13910 /* Only optimize constant conditions when the selected branch
13911 has the same type as the COND_EXPR. This avoids optimizing
13912 away "c ? x : throw", where the throw has a void type.
13913 Do not throw away the unused operand if it contains a label. */
13914 if ((!TREE_SIDE_EFFECTS (unused_op)
13915 || !contains_label_p (unused_op))
13916 && (! VOID_TYPE_P (TREE_TYPE (tem))
13917 || VOID_TYPE_P (type)))
13918 return pedantic_non_lvalue_loc (loc, tem);
13919 return NULL_TREE;
13921 else if (TREE_CODE (arg0) == VECTOR_CST)
13923 if (integer_all_onesp (arg0))
13924 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13925 if (integer_zerop (arg0))
13926 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13928 if ((TREE_CODE (arg1) == VECTOR_CST
13929 || TREE_CODE (arg1) == CONSTRUCTOR)
13930 && (TREE_CODE (arg2) == VECTOR_CST
13931 || TREE_CODE (arg2) == CONSTRUCTOR))
13933 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13934 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13935 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13936 for (i = 0; i < nelts; i++)
13938 tree val = VECTOR_CST_ELT (arg0, i);
13939 if (integer_all_onesp (val))
13940 sel[i] = i;
13941 else if (integer_zerop (val))
13942 sel[i] = nelts + i;
13943 else /* Currently unreachable. */
13944 return NULL_TREE;
13946 tree t = fold_vec_perm (type, arg1, arg2, sel);
13947 if (t != NULL_TREE)
13948 return t;
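/* Added example: for nelts == 4 and the constant mask {-1, 0, 0, -1},
   the selector becomes {0, 5, 6, 3}, i.e. elements 0 and 3 are taken
   from ARG1 and elements 1 and 2 from ARG2, and fold_vec_perm builds
   the resulting constant vector.  */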
13952 if (operand_equal_p (arg1, op2, 0))
13953 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13955 /* If we have A op B ? A : C, we may be able to convert this to a
13956 simpler expression, depending on the operation and the values
13957 of B and C. Signed zeros prevent all of these transformations,
13958 for reasons given above each one.
13960 Also try swapping the arguments and inverting the conditional. */
13961 if (COMPARISON_CLASS_P (arg0)
13962 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13963 arg1, TREE_OPERAND (arg0, 1))
13964 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13966 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13967 if (tem)
13968 return tem;
13971 if (COMPARISON_CLASS_P (arg0)
13972 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13973 op2,
13974 TREE_OPERAND (arg0, 1))
13975 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13977 location_t loc0 = expr_location_or (arg0, loc);
13978 tem = fold_truth_not_expr (loc0, arg0);
13979 if (tem && COMPARISON_CLASS_P (tem))
13981 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13982 if (tem)
13983 return tem;
13987 /* ??? Fixup the code below for VEC_COND_EXPR. */
13988 if (code == VEC_COND_EXPR)
13989 return NULL_TREE;
13991 /* If the second operand is simpler than the third, swap them
13992 since that produces better jump optimization results. */
13993 if (truth_value_p (TREE_CODE (arg0))
13994 && tree_swap_operands_p (op1, op2, false))
13996 location_t loc0 = expr_location_or (arg0, loc);
13997 /* See if this can be inverted. If it can't, possibly because
13998 it was a floating-point inequality comparison, don't do
13999 anything. */
14000 tem = fold_truth_not_expr (loc0, arg0);
14001 if (tem)
14002 return fold_build3_loc (loc, code, type, tem, op2, op1);
14005 /* Convert A ? 1 : 0 to simply A. */
14006 if (integer_onep (op1)
14007 && integer_zerop (op2)
14008 /* If we try to convert OP0 to our type, the
14009 call to fold will try to move the conversion inside
14010 a COND, which will recurse. In that case, the COND_EXPR
14011 is probably the best choice, so leave it alone. */
14012 && type == TREE_TYPE (arg0))
14013 return pedantic_non_lvalue_loc (loc, arg0);
14015 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14016 over COND_EXPR in cases such as floating point comparisons. */
14017 if (integer_zerop (op1)
14018 && integer_onep (op2)
14019 && truth_value_p (TREE_CODE (arg0)))
14020 return pedantic_non_lvalue_loc (loc,
14021 fold_convert_loc (loc, type,
14022 invert_truthvalue_loc (loc,
14023 arg0)));
14025 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14026 if (TREE_CODE (arg0) == LT_EXPR
14027 && integer_zerop (TREE_OPERAND (arg0, 1))
14028 && integer_zerop (op2)
14029 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14031 /* sign_bit_p only checks ARG1 bits within A's precision.
14032 If <sign bit of A> has wider type than A, bits outside
14033 of A's precision in <sign bit of A> need to be checked.
14034 If they are all 0, this optimization needs to be done
14035 in unsigned A's type; if they are all 1, in signed A's type;
14036 otherwise this can't be done. */
14037 if (TYPE_PRECISION (TREE_TYPE (tem))
14038 < TYPE_PRECISION (TREE_TYPE (arg1))
14039 && TYPE_PRECISION (TREE_TYPE (tem))
14040 < TYPE_PRECISION (type))
14042 unsigned HOST_WIDE_INT mask_lo;
14043 HOST_WIDE_INT mask_hi;
14044 int inner_width, outer_width;
14045 tree tem_type;
14047 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14048 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14049 if (outer_width > TYPE_PRECISION (type))
14050 outer_width = TYPE_PRECISION (type);
14052 if (outer_width > HOST_BITS_PER_WIDE_INT)
14054 mask_hi = ((unsigned HOST_WIDE_INT) -1
14055 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14056 mask_lo = -1;
14058 else
14060 mask_hi = 0;
14061 mask_lo = ((unsigned HOST_WIDE_INT) -1
14062 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14064 if (inner_width > HOST_BITS_PER_WIDE_INT)
14066 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14067 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14068 mask_lo = 0;
14070 else
14071 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14072 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14074 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14075 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14077 tem_type = signed_type_for (TREE_TYPE (tem));
14078 tem = fold_convert_loc (loc, tem_type, tem);
14080 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14081 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14083 tem_type = unsigned_type_for (TREE_TYPE (tem));
14084 tem = fold_convert_loc (loc, tem_type, tem);
14086 else
14087 tem = NULL;
14090 if (tem)
14091 return
14092 fold_convert_loc (loc, type,
14093 fold_build2_loc (loc, BIT_AND_EXPR,
14094 TREE_TYPE (tem), tem,
14095 fold_convert_loc (loc,
14096 TREE_TYPE (tem),
14097 arg1)));
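/* Worked example of the mask computation above (added commentary,
   assuming HOST_BITS_PER_WIDE_INT == 64): for A of type signed char
   with ARG1 and the result in 32-bit types, inner_width == 8 and
   outer_width == 32, so mask_hi == 0 and mask_lo == 0xffffff00,
   exactly the bits of ARG1 outside A's precision.  If ARG1 has all
   of those bits set, the BIT_AND_EXPR is done in signed char; if
   all clear, in unsigned char; otherwise we give up.  */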
14100 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14101 already handled above. */
14102 if (TREE_CODE (arg0) == BIT_AND_EXPR
14103 && integer_onep (TREE_OPERAND (arg0, 1))
14104 && integer_zerop (op2)
14105 && integer_pow2p (arg1))
14107 tree tem = TREE_OPERAND (arg0, 0);
14108 STRIP_NOPS (tem);
14109 if (TREE_CODE (tem) == RSHIFT_EXPR
14110 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14111 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14112 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14113 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14114 TREE_OPERAND (tem, 0), arg1);
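/* E.g. (added example): ((a >> 3) & 1) ? 8 : 0 folds to a & 8,
   because tree_log2 (8) == 3 matches the shift count.  */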
14117 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14118 is probably obsolete because the first operand should be a
14119 truth value (that's why we have the two cases above), but let's
14120 leave it in until we can confirm this for all front-ends. */
14121 if (integer_zerop (op2)
14122 && TREE_CODE (arg0) == NE_EXPR
14123 && integer_zerop (TREE_OPERAND (arg0, 1))
14124 && integer_pow2p (arg1)
14125 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14126 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14127 arg1, OEP_ONLY_CONST))
14128 return pedantic_non_lvalue_loc (loc,
14129 fold_convert_loc (loc, type,
14130 TREE_OPERAND (arg0, 0)));
14132 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14133 if (integer_zerop (op2)
14134 && truth_value_p (TREE_CODE (arg0))
14135 && truth_value_p (TREE_CODE (arg1)))
14136 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14137 fold_convert_loc (loc, type, arg0),
14138 arg1);
14140 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14141 if (integer_onep (op2)
14142 && truth_value_p (TREE_CODE (arg0))
14143 && truth_value_p (TREE_CODE (arg1)))
14145 location_t loc0 = expr_location_or (arg0, loc);
14146 /* Only perform transformation if ARG0 is easily inverted. */
14147 tem = fold_truth_not_expr (loc0, arg0);
14148 if (tem)
14149 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14150 fold_convert_loc (loc, type, tem),
14151 arg1);
14154 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14155 if (integer_zerop (arg1)
14156 && truth_value_p (TREE_CODE (arg0))
14157 && truth_value_p (TREE_CODE (op2)))
14159 location_t loc0 = expr_location_or (arg0, loc);
14160 /* Only perform transformation if ARG0 is easily inverted. */
14161 tem = fold_truth_not_expr (loc0, arg0);
14162 if (tem)
14163 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14164 fold_convert_loc (loc, type, tem),
14165 op2);
14168 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14169 if (integer_onep (arg1)
14170 && truth_value_p (TREE_CODE (arg0))
14171 && truth_value_p (TREE_CODE (op2)))
14172 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14173 fold_convert_loc (loc, type, arg0),
14174 op2);
14176 return NULL_TREE;
14178 case CALL_EXPR:
14179 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14180 of fold_ternary on them. */
14181 gcc_unreachable ();
14183 case BIT_FIELD_REF:
14184 if ((TREE_CODE (arg0) == VECTOR_CST
14185 || (TREE_CODE (arg0) == CONSTRUCTOR
14186 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14187 && (type == TREE_TYPE (TREE_TYPE (arg0))
14188 || (TREE_CODE (type) == VECTOR_TYPE
14189 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14191 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14192 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14193 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14194 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14196 if (n != 0
14197 && (idx % width) == 0
14198 && (n % width) == 0
14199 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14201 idx = idx / width;
14202 n = n / width;
14204 if (TREE_CODE (arg0) == VECTOR_CST)
14206 if (n == 1)
14207 return VECTOR_CST_ELT (arg0, idx);
14209 tree *vals = XALLOCAVEC (tree, n);
14210 for (unsigned i = 0; i < n; ++i)
14211 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14212 return build_vector (type, vals);
14215 /* Constructor elements can be subvectors. */
14216 unsigned HOST_WIDE_INT k = 1;
14217 if (CONSTRUCTOR_NELTS (arg0) != 0)
14219 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14220 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14221 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14224 /* We keep an exact subset of the constructor elements. */
14225 if ((idx % k) == 0 && (n % k) == 0)
14227 if (CONSTRUCTOR_NELTS (arg0) == 0)
14228 return build_constructor (type, NULL);
14229 idx /= k;
14230 n /= k;
14231 if (n == 1)
14233 if (idx < CONSTRUCTOR_NELTS (arg0))
14234 return CONSTRUCTOR_ELT (arg0, idx)->value;
14235 return build_zero_cst (type);
14238 vec<constructor_elt, va_gc> *vals;
14239 vec_alloc (vals, n);
14240 for (unsigned i = 0;
14241 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14242 ++i)
14243 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14244 CONSTRUCTOR_ELT
14245 (arg0, idx + i)->value);
14246 return build_constructor (type, vals);
14248 /* The bitfield references a single constructor element. */
14249 else if (idx + n <= (idx / k + 1) * k)
14251 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14252 return build_zero_cst (type);
14253 else if (n == k)
14254 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14255 else
14256 return fold_build3_loc (loc, code, type,
14257 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14258 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14263 /* A bit-field-ref that referenced the full argument can be stripped. */
14264 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14265 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14266 && integer_zerop (op2))
14267 return fold_convert_loc (loc, type, arg0);
14269 /* On constants we can use native encode/interpret to constant
14270 fold (nearly) all BIT_FIELD_REFs. */
14271 if (CONSTANT_CLASS_P (arg0)
14272 && can_native_interpret_type_p (type)
14273 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14274 /* This limitation should not be necessary; we just need to
14275 round this up to mode size. */
14276 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14277 /* Need bit-shifting of the buffer to relax the following. */
14278 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14280 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14281 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14282 unsigned HOST_WIDE_INT clen;
14283 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14284 /* ??? We cannot tell native_encode_expr to start at
14285 some random byte only. So limit us to a reasonable amount
14286 of work. */
14287 if (clen <= 4096)
14289 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14290 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14291 if (len > 0
14292 && len * BITS_PER_UNIT >= bitpos + bitsize)
14294 tree v = native_interpret_expr (type,
14295 b + bitpos / BITS_PER_UNIT,
14296 bitsize / BITS_PER_UNIT);
14297 if (v)
14298 return v;
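/* Added illustration (byte order is target dependent): a
   BIT_FIELD_REF of 16 bits at bit offset 8 from the 32-bit constant
   0x12345678 encodes the constant into its target byte image and
   re-interprets bytes 1..2 of that buffer in the requested type.  */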
14303 return NULL_TREE;
14305 case FMA_EXPR:
14306 /* For integers we can decompose the FMA if possible. */
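/* E.g. (added example): FMA_EXPR <5, 7, c> becomes PLUS_EXPR <35, c>,
   and FMA_EXPR <a, b, 0> becomes MULT_EXPR <a, b>.  */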
14307 if (TREE_CODE (arg0) == INTEGER_CST
14308 && TREE_CODE (arg1) == INTEGER_CST)
14309 return fold_build2_loc (loc, PLUS_EXPR, type,
14310 const_binop (MULT_EXPR, arg0, arg1), arg2);
14311 if (integer_zerop (arg2))
14312 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14314 return fold_fma (loc, type, arg0, arg1, arg2);
14316 case VEC_PERM_EXPR:
14317 if (TREE_CODE (arg2) == VECTOR_CST)
14319 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14320 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14321 tree t;
14322 bool need_mask_canon = false;
14323 bool all_in_vec0 = true;
14324 bool all_in_vec1 = true;
14325 bool maybe_identity = true;
14326 bool single_arg = (op0 == op1);
14327 bool changed = false;
14329 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14330 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14331 for (i = 0; i < nelts; i++)
14333 tree val = VECTOR_CST_ELT (arg2, i);
14334 if (TREE_CODE (val) != INTEGER_CST)
14335 return NULL_TREE;
14337 sel[i] = TREE_INT_CST_LOW (val) & mask;
14338 if (TREE_INT_CST_HIGH (val)
14339 || ((unsigned HOST_WIDE_INT)
14340 TREE_INT_CST_LOW (val) != sel[i]))
14341 need_mask_canon = true;
14343 if (sel[i] < nelts)
14344 all_in_vec1 = false;
14345 else
14346 all_in_vec0 = false;
14348 if ((sel[i] & (nelts-1)) != i)
14349 maybe_identity = false;
14352 if (maybe_identity)
14354 if (all_in_vec0)
14355 return op0;
14356 if (all_in_vec1)
14357 return op1;
14360 if (all_in_vec0)
14361 op1 = op0;
14362 else if (all_in_vec1)
14364 op0 = op1;
14365 for (i = 0; i < nelts; i++)
14366 sel[i] -= nelts;
14367 need_mask_canon = true;
14370 if ((TREE_CODE (op0) == VECTOR_CST
14371 || TREE_CODE (op0) == CONSTRUCTOR)
14372 && (TREE_CODE (op1) == VECTOR_CST
14373 || TREE_CODE (op1) == CONSTRUCTOR))
14375 t = fold_vec_perm (type, op0, op1, sel);
14376 if (t != NULL_TREE)
14377 return t;
14380 if (op0 == op1 && !single_arg)
14381 changed = true;
14383 if (need_mask_canon && arg2 == op2)
14385 tree *tsel = XALLOCAVEC (tree, nelts);
14386 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14387 for (i = 0; i < nelts; i++)
14388 tsel[i] = build_int_cst (eltype, sel[i]);
14389 op2 = build_vector (TREE_TYPE (arg2), tsel);
14390 changed = true;
14393 if (changed)
14394 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14396 return NULL_TREE;
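/* Two illustrative cases of the canonicalizations above (added
   commentary): with nelts == 4, the selector {4, 5, 6, 7} picks only
   from the second vector and, masked by nelts - 1, is the identity
   {0, 1, 2, 3}, so OP1 itself is returned; the selector {0, 1, 2, 3}
   likewise returns OP0.  */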
14398 default:
14399 return NULL_TREE;
14400 } /* switch (code) */
14403 /* Perform constant folding and related simplification of EXPR.
14404 The related simplifications include x*1 => x, x*0 => 0, etc.,
14405 and application of the associative law.
14406 NOP_EXPR conversions may be removed freely (as long as we
14407 are careful not to change the type of the overall expression).
14408 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14409 but we can constant-fold them if they have constant operands. */
14411 #ifdef ENABLE_FOLD_CHECKING
14412 # define fold(x) fold_1 (x)
14413 static tree fold_1 (tree);
14414 static
14415 #endif
14416 tree
14417 fold (tree expr)
14419 const tree t = expr;
14420 enum tree_code code = TREE_CODE (t);
14421 enum tree_code_class kind = TREE_CODE_CLASS (code);
14422 tree tem;
14423 location_t loc = EXPR_LOCATION (expr);
14425 /* Return right away if a constant. */
14426 if (kind == tcc_constant)
14427 return t;
14429 /* CALL_EXPR-like objects with variable numbers of operands are
14430 treated specially. */
14431 if (kind == tcc_vl_exp)
14433 if (code == CALL_EXPR)
14435 tem = fold_call_expr (loc, expr, false);
14436 return tem ? tem : expr;
14438 return expr;
14441 if (IS_EXPR_CODE_CLASS (kind))
14443 tree type = TREE_TYPE (t);
14444 tree op0, op1, op2;
14446 switch (TREE_CODE_LENGTH (code))
14448 case 1:
14449 op0 = TREE_OPERAND (t, 0);
14450 tem = fold_unary_loc (loc, code, type, op0);
14451 return tem ? tem : expr;
14452 case 2:
14453 op0 = TREE_OPERAND (t, 0);
14454 op1 = TREE_OPERAND (t, 1);
14455 tem = fold_binary_loc (loc, code, type, op0, op1);
14456 return tem ? tem : expr;
14457 case 3:
14458 op0 = TREE_OPERAND (t, 0);
14459 op1 = TREE_OPERAND (t, 1);
14460 op2 = TREE_OPERAND (t, 2);
14461 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14462 return tem ? tem : expr;
14463 default:
14464 break;
14468 switch (code)
14470 case ARRAY_REF:
14472 tree op0 = TREE_OPERAND (t, 0);
14473 tree op1 = TREE_OPERAND (t, 1);
14475 if (TREE_CODE (op1) == INTEGER_CST
14476 && TREE_CODE (op0) == CONSTRUCTOR
14477 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14479 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14480 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14481 unsigned HOST_WIDE_INT begin = 0;
14483 /* Find a matching index by means of a binary search. */
14484 while (begin != end)
14486 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14487 tree index = (*elts)[middle].index;
14489 if (TREE_CODE (index) == INTEGER_CST
14490 && tree_int_cst_lt (index, op1))
14491 begin = middle + 1;
14492 else if (TREE_CODE (index) == INTEGER_CST
14493 && tree_int_cst_lt (op1, index))
14494 end = middle;
14495 else if (TREE_CODE (index) == RANGE_EXPR
14496 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14497 begin = middle + 1;
14498 else if (TREE_CODE (index) == RANGE_EXPR
14499 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14500 end = middle;
14501 else
14502 return (*elts)[middle].value;
14506 return t;
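/* Added example of the lookup above: for a CONSTRUCTOR
   {0: a, 4: b, [5..7]: c} and constant index 6, the binary search
   narrows onto the RANGE_EXPR [5..7], whose bounds bracket 6, and
   the value c is returned.  */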
14509 /* Return a VECTOR_CST if possible. */
14510 case CONSTRUCTOR:
14512 tree type = TREE_TYPE (t);
14513 if (TREE_CODE (type) != VECTOR_TYPE)
14514 return t;
14516 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14517 unsigned HOST_WIDE_INT idx, pos = 0;
14518 tree value;
14520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14522 if (!CONSTANT_CLASS_P (value))
14523 return t;
14524 if (TREE_CODE (value) == VECTOR_CST)
14526 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14527 vec[pos++] = VECTOR_CST_ELT (value, i);
14529 else
14530 vec[pos++] = value;
14532 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14533 vec[pos] = build_zero_cst (TREE_TYPE (type));
14535 return build_vector (type, vec);
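/* E.g. (added example): a V4SI CONSTRUCTOR listing only the constant
   elements {1, 2} folds to the VECTOR_CST {1, 2, 0, 0}; constant
   subvector values are flattened element by element first.  */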
14538 case CONST_DECL:
14539 return fold (DECL_INITIAL (t));
14541 default:
14542 return t;
14543 } /* switch (code) */
14546 #ifdef ENABLE_FOLD_CHECKING
14547 #undef fold
14549 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14550 hash_table <pointer_hash <tree_node> >);
14551 static void fold_check_failed (const_tree, const_tree);
14552 void print_fold_checksum (const_tree);
14554 /* When --enable-checking=fold, compute a digest of expr before
14555 and after the actual fold call to verify that fold did not
14556 accidentally change the original expr. */
14558 tree
14559 fold (tree expr)
14561 tree ret;
14562 struct md5_ctx ctx;
14563 unsigned char checksum_before[16], checksum_after[16];
14564 hash_table <pointer_hash <tree_node> > ht;
14566 ht.create (32);
14567 md5_init_ctx (&ctx);
14568 fold_checksum_tree (expr, &ctx, ht);
14569 md5_finish_ctx (&ctx, checksum_before);
14570 ht.empty ();
14572 ret = fold_1 (expr);
14574 md5_init_ctx (&ctx);
14575 fold_checksum_tree (expr, &ctx, ht);
14576 md5_finish_ctx (&ctx, checksum_after);
14577 ht.dispose ();
14579 if (memcmp (checksum_before, checksum_after, 16))
14580 fold_check_failed (expr, ret);
14582 return ret;
14585 void
14586 print_fold_checksum (const_tree expr)
14588 struct md5_ctx ctx;
14589 unsigned char checksum[16], cnt;
14590 hash_table <pointer_hash <tree_node> > ht;
14592 ht.create (32);
14593 md5_init_ctx (&ctx);
14594 fold_checksum_tree (expr, &ctx, ht);
14595 md5_finish_ctx (&ctx, checksum);
14596 ht.dispose ();
14597 for (cnt = 0; cnt < 16; ++cnt)
14598 fprintf (stderr, "%02x", checksum[cnt]);
14599 putc ('\n', stderr);
14602 static void
14603 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14605 internal_error ("fold check: original tree changed by fold");
14608 static void
14609 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14610 hash_table <pointer_hash <tree_node> > ht)
14612 tree_node **slot;
14613 enum tree_code code;
14614 union tree_node buf;
14615 int i, len;
14617 recursive_label:
14618 if (expr == NULL)
14619 return;
14620 slot = ht.find_slot (expr, INSERT);
14621 if (*slot != NULL)
14622 return;
14623 *slot = CONST_CAST_TREE (expr);
14624 code = TREE_CODE (expr);
14625 if (TREE_CODE_CLASS (code) == tcc_declaration
14626 && DECL_ASSEMBLER_NAME_SET_P (expr))
14628 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14629 memcpy ((char *) &buf, expr, tree_size (expr));
14630 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14631 expr = (tree) &buf;
14633 else if (TREE_CODE_CLASS (code) == tcc_type
14634 && (TYPE_POINTER_TO (expr)
14635 || TYPE_REFERENCE_TO (expr)
14636 || TYPE_CACHED_VALUES_P (expr)
14637 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14638 || TYPE_NEXT_VARIANT (expr)))
14640 /* Allow these fields to be modified. */
14641 tree tmp;
14642 memcpy ((char *) &buf, expr, tree_size (expr));
14643 expr = tmp = (tree) &buf;
14644 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14645 TYPE_POINTER_TO (tmp) = NULL;
14646 TYPE_REFERENCE_TO (tmp) = NULL;
14647 TYPE_NEXT_VARIANT (tmp) = NULL;
14648 if (TYPE_CACHED_VALUES_P (tmp))
14650 TYPE_CACHED_VALUES_P (tmp) = 0;
14651 TYPE_CACHED_VALUES (tmp) = NULL;
14654 md5_process_bytes (expr, tree_size (expr), ctx);
14655 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14656 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14657 if (TREE_CODE_CLASS (code) != tcc_type
14658 && TREE_CODE_CLASS (code) != tcc_declaration
14659 && code != TREE_LIST
14660 && code != SSA_NAME
14661 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14662 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14663 switch (TREE_CODE_CLASS (code))
14665 case tcc_constant:
14666 switch (code)
14668 case STRING_CST:
14669 md5_process_bytes (TREE_STRING_POINTER (expr),
14670 TREE_STRING_LENGTH (expr), ctx);
14671 break;
14672 case COMPLEX_CST:
14673 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14674 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14675 break;
14676 case VECTOR_CST:
14677 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14678 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14679 break;
14680 default:
14681 break;
14683 break;
14684 case tcc_exceptional:
14685 switch (code)
14687 case TREE_LIST:
14688 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14689 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14690 expr = TREE_CHAIN (expr);
14691 goto recursive_label;
14692 break;
14693 case TREE_VEC:
14694 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14695 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14696 break;
14697 default:
14698 break;
14700 break;
14701 case tcc_expression:
14702 case tcc_reference:
14703 case tcc_comparison:
14704 case tcc_unary:
14705 case tcc_binary:
14706 case tcc_statement:
14707 case tcc_vl_exp:
14708 len = TREE_OPERAND_LENGTH (expr);
14709 for (i = 0; i < len; ++i)
14710 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14711 break;
14712 case tcc_declaration:
14713 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14714 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14715 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14717 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14718 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14719 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14720 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14721 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14723 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14724 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14726 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14728 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14729 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14730 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14732 break;
14733 case tcc_type:
14734 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14735 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14736 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14737 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14738 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14739 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14740 if (INTEGRAL_TYPE_P (expr)
14741 || SCALAR_FLOAT_TYPE_P (expr))
14743 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14744 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14746 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14747 if (TREE_CODE (expr) == RECORD_TYPE
14748 || TREE_CODE (expr) == UNION_TYPE
14749 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14750 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14751 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14752 break;
14753 default:
14754 break;
14758 /* Helper function for outputting the checksum of a tree T. When
14759 debugging with gdb, you can "define mynext" to be "next" followed
14760 by "call debug_fold_checksum (op0)", then just trace down till the
14761 outputs differ. */
14763 DEBUG_FUNCTION void
14764 debug_fold_checksum (const_tree t)
14766 int i;
14767 unsigned char checksum[16];
14768 struct md5_ctx ctx;
14769 hash_table <pointer_hash <tree_node> > ht;
14770 ht.create (32);
14772 md5_init_ctx (&ctx);
14773 fold_checksum_tree (t, &ctx, ht);
14774 md5_finish_ctx (&ctx, checksum);
14775 ht.empty ();
14777 for (i = 0; i < 16; i++)
14778 fprintf (stderr, "%d ", checksum[i]);
14780 fprintf (stderr, "\n");
14783 #endif
14785 /* Fold a unary tree expression with code CODE of type TYPE with an
14786 operand OP0. LOC is the location of the resulting expression.
14787 Return a folded expression if successful. Otherwise, return a tree
14788 expression with code CODE of type TYPE with an operand OP0. */
14790 tree
14791 fold_build1_stat_loc (location_t loc,
14792 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14794 tree tem;
14795 #ifdef ENABLE_FOLD_CHECKING
14796 unsigned char checksum_before[16], checksum_after[16];
14797 struct md5_ctx ctx;
14798 hash_table <pointer_hash <tree_node> > ht;
14800 ht.create (32);
14801 md5_init_ctx (&ctx);
14802 fold_checksum_tree (op0, &ctx, ht);
14803 md5_finish_ctx (&ctx, checksum_before);
14804 ht.empty ();
14805 #endif
14807 tem = fold_unary_loc (loc, code, type, op0);
14808 if (!tem)
14809 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14811 #ifdef ENABLE_FOLD_CHECKING
14812 md5_init_ctx (&ctx);
14813 fold_checksum_tree (op0, &ctx, ht);
14814 md5_finish_ctx (&ctx, checksum_after);
14815 ht.dispose ();
14817 if (memcmp (checksum_before, checksum_after, 16))
14818 fold_check_failed (op0, tem);
14819 #endif
14820 return tem;
14823 /* Fold a binary tree expression with code CODE of type TYPE with
14824 operands OP0 and OP1. LOC is the location of the resulting
14825 expression. Return a folded expression if successful. Otherwise,
14826 return a tree expression with code CODE of type TYPE with operands
14827 OP0 and OP1. */
14829 tree
14830 fold_build2_stat_loc (location_t loc,
14831 enum tree_code code, tree type, tree op0, tree op1
14832 MEM_STAT_DECL)
14834 tree tem;
14835 #ifdef ENABLE_FOLD_CHECKING
14836 unsigned char checksum_before_op0[16],
14837 checksum_before_op1[16],
14838 checksum_after_op0[16],
14839 checksum_after_op1[16];
14840 struct md5_ctx ctx;
14841 hash_table <pointer_hash <tree_node> > ht;
14843 ht.create (32);
14844 md5_init_ctx (&ctx);
14845 fold_checksum_tree (op0, &ctx, ht);
14846 md5_finish_ctx (&ctx, checksum_before_op0);
14847 ht.empty ();
14849 md5_init_ctx (&ctx);
14850 fold_checksum_tree (op1, &ctx, ht);
14851 md5_finish_ctx (&ctx, checksum_before_op1);
14852 ht.empty ();
14853 #endif
14855 tem = fold_binary_loc (loc, code, type, op0, op1);
14856 if (!tem)
14857 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14859 #ifdef ENABLE_FOLD_CHECKING
14860 md5_init_ctx (&ctx);
14861 fold_checksum_tree (op0, &ctx, ht);
14862 md5_finish_ctx (&ctx, checksum_after_op0);
14863 ht.empty ();
14865 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14866 fold_check_failed (op0, tem);
14868 md5_init_ctx (&ctx);
14869 fold_checksum_tree (op1, &ctx, ht);
14870 md5_finish_ctx (&ctx, checksum_after_op1);
14871 ht.dispose ();
14873 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14874 fold_check_failed (op1, tem);
14875 #endif
14876 return tem;
14879 /* Fold a ternary tree expression with code CODE of type TYPE with
14880 operands OP0, OP1, and OP2. Return a folded expression if
14881 successful. Otherwise, return a tree expression with code CODE of
14882 type TYPE with operands OP0, OP1, and OP2. */
14884 tree
14885 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14886 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14888 tree tem;
14889 #ifdef ENABLE_FOLD_CHECKING
14890 unsigned char checksum_before_op0[16],
14891 checksum_before_op1[16],
14892 checksum_before_op2[16],
14893 checksum_after_op0[16],
14894 checksum_after_op1[16],
14895 checksum_after_op2[16];
14896 struct md5_ctx ctx;
14897 hash_table <pointer_hash <tree_node> > ht;
14899 ht.create (32);
14900 md5_init_ctx (&ctx);
14901 fold_checksum_tree (op0, &ctx, ht);
14902 md5_finish_ctx (&ctx, checksum_before_op0);
14903 ht.empty ();
14905 md5_init_ctx (&ctx);
14906 fold_checksum_tree (op1, &ctx, ht);
14907 md5_finish_ctx (&ctx, checksum_before_op1);
14908 ht.empty ();
14910 md5_init_ctx (&ctx);
14911 fold_checksum_tree (op2, &ctx, ht);
14912 md5_finish_ctx (&ctx, checksum_before_op2);
14913 ht.empty ();
14914 #endif
14916 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14917 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14918 if (!tem)
14919 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14921 #ifdef ENABLE_FOLD_CHECKING
14922 md5_init_ctx (&ctx);
14923 fold_checksum_tree (op0, &ctx, ht);
14924 md5_finish_ctx (&ctx, checksum_after_op0);
14925 ht.empty ();
14927 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14928 fold_check_failed (op0, tem);
14930 md5_init_ctx (&ctx);
14931 fold_checksum_tree (op1, &ctx, ht);
14932 md5_finish_ctx (&ctx, checksum_after_op1);
14933 ht.empty ();
14935 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14936 fold_check_failed (op1, tem);
14938 md5_init_ctx (&ctx);
14939 fold_checksum_tree (op2, &ctx, ht);
14940 md5_finish_ctx (&ctx, checksum_after_op2);
14941 ht.dispose ();
14943 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14944 fold_check_failed (op2, tem);
14945 #endif
14946 return tem;
14949 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14950 arguments in ARGARRAY, and a null static chain.
14951 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14952 of type TYPE from the given operands as constructed by build_call_array. */
14954 tree
14955 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14956 int nargs, tree *argarray)
14958 tree tem;
14959 #ifdef ENABLE_FOLD_CHECKING
14960 unsigned char checksum_before_fn[16],
14961 checksum_before_arglist[16],
14962 checksum_after_fn[16],
14963 checksum_after_arglist[16];
14964 struct md5_ctx ctx;
14965 hash_table <pointer_hash <tree_node> > ht;
14966 int i;
14968 ht.create (32);
14969 md5_init_ctx (&ctx);
14970 fold_checksum_tree (fn, &ctx, ht);
14971 md5_finish_ctx (&ctx, checksum_before_fn);
14972 ht.empty ();
14974 md5_init_ctx (&ctx);
14975 for (i = 0; i < nargs; i++)
14976 fold_checksum_tree (argarray[i], &ctx, ht);
14977 md5_finish_ctx (&ctx, checksum_before_arglist);
14978 ht.empty ();
14979 #endif
14981 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14983 #ifdef ENABLE_FOLD_CHECKING
14984 md5_init_ctx (&ctx);
14985 fold_checksum_tree (fn, &ctx, ht);
14986 md5_finish_ctx (&ctx, checksum_after_fn);
14987 ht.empty ();
14989 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14990 fold_check_failed (fn, tem);
14992 md5_init_ctx (&ctx);
14993 for (i = 0; i < nargs; i++)
14994 fold_checksum_tree (argarray[i], &ctx, ht);
14995 md5_finish_ctx (&ctx, checksum_after_arglist);
14996 ht.dispose ();
14998 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14999 fold_check_failed (NULL_TREE, tem);
15000 #endif
15001 return tem;
15004 /* Perform constant folding and related simplification of initializer
15005 expression EXPR. These behave identically to "fold_buildN" but ignore
15006 potential run-time traps and exceptions that fold must preserve. */
15008 #define START_FOLD_INIT \
15009 int saved_signaling_nans = flag_signaling_nans;\
15010 int saved_trapping_math = flag_trapping_math;\
15011 int saved_rounding_math = flag_rounding_math;\
15012 int saved_trapv = flag_trapv;\
15013 int saved_folding_initializer = folding_initializer;\
15014 flag_signaling_nans = 0;\
15015 flag_trapping_math = 0;\
15016 flag_rounding_math = 0;\
15017 flag_trapv = 0;\
15018 folding_initializer = 1;
15020 #define END_FOLD_INIT \
15021 flag_signaling_nans = saved_signaling_nans;\
15022 flag_trapping_math = saved_trapping_math;\
15023 flag_rounding_math = saved_rounding_math;\
15024 flag_trapv = saved_trapv;\
15025 folding_initializer = saved_folding_initializer;
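/* Illustrative motivation (added commentary, not authoritative):
   under flags such as -frounding-math or -ftrapping-math, fold must
   preserve run-time behavior and may therefore decline to evaluate
   an inexact expression like 1.0 / 3.0.  A static initializer must
   be computed at compile time regardless, so the wrappers below
   temporarily clear those flags around the actual fold call.  */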
15027 tree
15028 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15029 tree type, tree op)
15031 tree result;
15032 START_FOLD_INIT;
15034 result = fold_build1_loc (loc, code, type, op);
15036 END_FOLD_INIT;
15037 return result;
15040 tree
15041 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15042 tree type, tree op0, tree op1)
15044 tree result;
15045 START_FOLD_INIT;
15047 result = fold_build2_loc (loc, code, type, op0, op1);
15049 END_FOLD_INIT;
15050 return result;
15053 tree
15054 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15055 tree type, tree op0, tree op1, tree op2)
15057 tree result;
15058 START_FOLD_INIT;
15060 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15062 END_FOLD_INIT;
15063 return result;
15066 tree
15067 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15068 int nargs, tree *argarray)
15070 tree result;
15071 START_FOLD_INIT;
15073 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15075 END_FOLD_INIT;
15076 return result;
15079 #undef START_FOLD_INIT
15080 #undef END_FOLD_INIT
15082 /* Determine if the first argument is a multiple of the second argument.
15083 Return 0 if it is not, or if we cannot easily determine it to be.
15085 An example of the sort of thing we care about (at this point; this routine
15086 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15087 fold cases do now) is discovering that
15089 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15091 is a multiple of
15093 SAVE_EXPR (J * 8)
15095 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15097 This code also handles discovering that
15099 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15101 is a multiple of 8, so we don't have to worry about a possible
15102 remainder.
15104 Note that we *look* inside a SAVE_EXPR only to determine how it was
15105 calculated; it is not safe for fold to do much of anything else with the
15106 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15107 at run time. For example, the latter example above *cannot* be implemented
15108 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15109 evaluation time of the original SAVE_EXPR is not necessarily the same at
15110 the time the new expression is evaluated. The only optimization of this
15111 sort that would be valid is changing
15113 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15115 divided by 8 to
15117 SAVE_EXPR (I) * SAVE_EXPR (J)
15119 (where the same SAVE_EXPR (J) is used in the original and the
15120 transformed version). */
15122 int
15123 multiple_of_p (tree type, const_tree top, const_tree bottom)
15125 if (operand_equal_p (top, bottom, 0))
15126 return 1;
15128 if (TREE_CODE (type) != INTEGER_TYPE)
15129 return 0;
15131 switch (TREE_CODE (top))
15133 case BIT_AND_EXPR:
15134 /* Bitwise and provides a power of two multiple. If the mask is
15135 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15136 if (!integer_pow2p (bottom))
15137 return 0;
15138 /* FALLTHRU */
15140 case MULT_EXPR:
15141 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15142 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15144 case PLUS_EXPR:
15145 case MINUS_EXPR:
15146 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15147 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15149 case LSHIFT_EXPR:
15150 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15152 tree op1, t1;
15154 op1 = TREE_OPERAND (top, 1);
15155 /* const_binop may not detect overflow correctly,
15156 so check for it explicitly here. */
15157 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15158 > TREE_INT_CST_LOW (op1)
15159 && TREE_INT_CST_HIGH (op1) == 0
15160 && 0 != (t1 = fold_convert (type,
15161 const_binop (LSHIFT_EXPR,
15162 size_one_node,
15163 op1)))
15164 && !TREE_OVERFLOW (t1))
15165 return multiple_of_p (type, t1, bottom);
15167 return 0;
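/* E.g. (added example): TOP == J << 3 against BOTTOM == 8 folds the
   shift to the constant 1 << 3 == 8, and the recursive call
   multiple_of_p (type, 8, 8) then succeeds.  */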
15169 case NOP_EXPR:
15170 /* Can't handle conversions from non-integral or wider integral type. */
15171 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15172 || (TYPE_PRECISION (type)
15173 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15174 return 0;
15176 /* ... fall through ... */
15178 case SAVE_EXPR:
15179 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15181 case COND_EXPR:
15182 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15183 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15185 case INTEGER_CST:
15186 if (TREE_CODE (bottom) != INTEGER_CST
15187 || integer_zerop (bottom)
15188 || (TYPE_UNSIGNED (type)
15189 && (tree_int_cst_sgn (top) < 0
15190 || tree_int_cst_sgn (bottom) < 0)))
15191 return 0;
15192 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15193 top, bottom));
15195 default:
15196 return 0;
15200 /* Return true if CODE or TYPE is known to be non-negative. */
15202 static bool
15203 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15205 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15206 && truth_value_p (code))
15207 /* Truth values evaluate to 0 or 1, both of which are nonnegative
15208 unless we have a signed:1 type (where the values are -1 and 0). */
15209 return true;
15210 return false;
15213 /* Return true if (CODE OP0) is known to be non-negative. If the return
15214 value is based on the assumption that signed overflow is undefined,
15215 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15216 *STRICT_OVERFLOW_P. */
15218 bool
15219 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15220 bool *strict_overflow_p)
15222 if (TYPE_UNSIGNED (type))
15223 return true;
15225 switch (code)
15227 case ABS_EXPR:
15228 /* We can't return 1 if flag_wrapv is set because
15229 ABS_EXPR<INT_MIN> = INT_MIN. */
15230 if (!INTEGRAL_TYPE_P (type))
15231 return true;
15232 if (TYPE_OVERFLOW_UNDEFINED (type))
15234 *strict_overflow_p = true;
15235 return true;
15237 break;
15239 case NON_LVALUE_EXPR:
15240 case FLOAT_EXPR:
15241 case FIX_TRUNC_EXPR:
15242 return tree_expr_nonnegative_warnv_p (op0,
15243 strict_overflow_p);
15245 case NOP_EXPR:
15247 tree inner_type = TREE_TYPE (op0);
15248 tree outer_type = type;
15250 if (TREE_CODE (outer_type) == REAL_TYPE)
15252 if (TREE_CODE (inner_type) == REAL_TYPE)
15253 return tree_expr_nonnegative_warnv_p (op0,
15254 strict_overflow_p);
15255 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15257 if (TYPE_UNSIGNED (inner_type))
15258 return true;
15259 return tree_expr_nonnegative_warnv_p (op0,
15260 strict_overflow_p);
15263 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15265 if (TREE_CODE (inner_type) == REAL_TYPE)
15266 return tree_expr_nonnegative_warnv_p (op0,
15267 strict_overflow_p);
15268 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15269 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15270 && TYPE_UNSIGNED (inner_type);
15273 break;
15275 default:
15276 return tree_simple_nonnegative_warnv_p (code, type);
15279 /* We don't know the sign of `t', so be conservative and return false. */
15280 return false;
15283 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15284 value is based on the assumption that signed overflow is undefined,
15285 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15286 *STRICT_OVERFLOW_P. */
15288 bool
15289 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15290 tree op1, bool *strict_overflow_p)
15292 if (TYPE_UNSIGNED (type))
15293 return true;
15295 switch (code)
15297 case POINTER_PLUS_EXPR:
15298 case PLUS_EXPR:
15299 if (FLOAT_TYPE_P (type))
15300 return (tree_expr_nonnegative_warnv_p (op0,
15301 strict_overflow_p)
15302 && tree_expr_nonnegative_warnv_p (op1,
15303 strict_overflow_p));
15305 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15306 both unsigned and at least 2 bits shorter than the result. */
15307 if (TREE_CODE (type) == INTEGER_TYPE
15308 && TREE_CODE (op0) == NOP_EXPR
15309 && TREE_CODE (op1) == NOP_EXPR)
15311 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15312 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15313 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15314 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15316 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15317 TYPE_PRECISION (inner2)) + 1;
15318 return prec < TYPE_PRECISION (type);
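/* Numeric check of the claim above (added): for unsigned char
   operands zero-extended to a 32-bit int, prec == MAX (8, 8) + 1
   == 9 < 32, and indeed the sum is at most 255 + 255 == 510,
   which can never be negative.  */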
15321 break;
15323 case MULT_EXPR:
15324 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15326 /* x * x is always non-negative for floating point x
15327 or when signed overflow is undefined. */
15328 if (operand_equal_p (op0, op1, 0)
15329 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15330 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15332 if (TYPE_OVERFLOW_UNDEFINED (type))
15333 *strict_overflow_p = true;
15334 return true;
15338 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15339 both unsigned and their combined precision is less than the result's. */
15340 if (TREE_CODE (type) == INTEGER_TYPE
15341 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15342 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15344 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15345 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15346 : TREE_TYPE (op0);
15347 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15348 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15349 : TREE_TYPE (op1);
15351 bool unsigned0 = TYPE_UNSIGNED (inner0);
15352 bool unsigned1 = TYPE_UNSIGNED (inner1);
15354 if (TREE_CODE (op0) == INTEGER_CST)
15355 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15357 if (TREE_CODE (op1) == INTEGER_CST)
15358 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15360 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15361 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15363 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15364 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15365 : TYPE_PRECISION (inner0);
15367 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15368 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15369 : TYPE_PRECISION (inner1);
15371 return precision0 + precision1 < TYPE_PRECISION (type);
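/* E.g. (added example): two zero-extended 16-bit values multiplied
   in a 64-bit type give precision0 + precision1 == 32 < 64, so the
   product is at most (2^16 - 1)^2 and stays non-negative; in a
   32-bit type the test 32 < 32 fails, and rightly so, since the
   product can exceed 2^31 - 1 and wrap negative.  */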
15374 return false;
15376 case BIT_AND_EXPR:
15377 case MAX_EXPR:
15378 return (tree_expr_nonnegative_warnv_p (op0,
15379 strict_overflow_p)
15380 || tree_expr_nonnegative_warnv_p (op1,
15381 strict_overflow_p));
15383 case BIT_IOR_EXPR:
15384 case BIT_XOR_EXPR:
15385 case MIN_EXPR:
15386 case RDIV_EXPR:
15387 case TRUNC_DIV_EXPR:
15388 case CEIL_DIV_EXPR:
15389 case FLOOR_DIV_EXPR:
15390 case ROUND_DIV_EXPR:
15391 return (tree_expr_nonnegative_warnv_p (op0,
15392 strict_overflow_p)
15393 && tree_expr_nonnegative_warnv_p (op1,
15394 strict_overflow_p));
15396 case TRUNC_MOD_EXPR:
15397 case CEIL_MOD_EXPR:
15398 case FLOOR_MOD_EXPR:
15399 case ROUND_MOD_EXPR:
15400 return tree_expr_nonnegative_warnv_p (op0,
15401 strict_overflow_p);
15402 default:
15403 return tree_simple_nonnegative_warnv_p (code, type);
15406 /* We don't know the sign of `t', so be conservative and return false. */
15407 return false;
15410 /* Return true if T is known to be non-negative. If the return
15411 value is based on the assumption that signed overflow is undefined,
15412 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15413 *STRICT_OVERFLOW_P. */
15415 bool
15416 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15418 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15419 return true;
15421 switch (TREE_CODE (t))
15423 case INTEGER_CST:
15424 return tree_int_cst_sgn (t) >= 0;
15426 case REAL_CST:
15427 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15429 case FIXED_CST:
15430 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15432 case COND_EXPR:
15433 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15434 strict_overflow_p)
15435 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15436 strict_overflow_p));
15437 default:
15438 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15439 TREE_TYPE (t));
15441 /* We don't know the sign of `t', so be conservative and return false. */
15442 return false;
15445 /* Return true if T is known to be non-negative. If the return
15446 value is based on the assumption that signed overflow is undefined,
15447 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15448 *STRICT_OVERFLOW_P. */
15450 bool
15451 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15452 tree arg0, tree arg1, bool *strict_overflow_p)
15454 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15455 switch (DECL_FUNCTION_CODE (fndecl))
15457 CASE_FLT_FN (BUILT_IN_ACOS):
15458 CASE_FLT_FN (BUILT_IN_ACOSH):
15459 CASE_FLT_FN (BUILT_IN_CABS):
15460 CASE_FLT_FN (BUILT_IN_COSH):
15461 CASE_FLT_FN (BUILT_IN_ERFC):
15462 CASE_FLT_FN (BUILT_IN_EXP):
15463 CASE_FLT_FN (BUILT_IN_EXP10):
15464 CASE_FLT_FN (BUILT_IN_EXP2):
15465 CASE_FLT_FN (BUILT_IN_FABS):
15466 CASE_FLT_FN (BUILT_IN_FDIM):
15467 CASE_FLT_FN (BUILT_IN_HYPOT):
15468 CASE_FLT_FN (BUILT_IN_POW10):
15469 CASE_INT_FN (BUILT_IN_FFS):
15470 CASE_INT_FN (BUILT_IN_PARITY):
15471 CASE_INT_FN (BUILT_IN_POPCOUNT):
15472 case BUILT_IN_BSWAP32:
15473 case BUILT_IN_BSWAP64:
15474 /* Always true. */
15475 return true;
15477 CASE_FLT_FN (BUILT_IN_SQRT):
15478 /* sqrt(-0.0) is -0.0. */
15479 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15480 return true;
15481 return tree_expr_nonnegative_warnv_p (arg0,
15482 strict_overflow_p);
15484 CASE_FLT_FN (BUILT_IN_ASINH):
15485 CASE_FLT_FN (BUILT_IN_ATAN):
15486 CASE_FLT_FN (BUILT_IN_ATANH):
15487 CASE_FLT_FN (BUILT_IN_CBRT):
15488 CASE_FLT_FN (BUILT_IN_CEIL):
15489 CASE_FLT_FN (BUILT_IN_ERF):
15490 CASE_FLT_FN (BUILT_IN_EXPM1):
15491 CASE_FLT_FN (BUILT_IN_FLOOR):
15492 CASE_FLT_FN (BUILT_IN_FMOD):
15493 CASE_FLT_FN (BUILT_IN_FREXP):
15494 CASE_FLT_FN (BUILT_IN_ICEIL):
15495 CASE_FLT_FN (BUILT_IN_IFLOOR):
15496 CASE_FLT_FN (BUILT_IN_IRINT):
15497 CASE_FLT_FN (BUILT_IN_IROUND):
15498 CASE_FLT_FN (BUILT_IN_LCEIL):
15499 CASE_FLT_FN (BUILT_IN_LDEXP):
15500 CASE_FLT_FN (BUILT_IN_LFLOOR):
15501 CASE_FLT_FN (BUILT_IN_LLCEIL):
15502 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15503 CASE_FLT_FN (BUILT_IN_LLRINT):
15504 CASE_FLT_FN (BUILT_IN_LLROUND):
15505 CASE_FLT_FN (BUILT_IN_LRINT):
15506 CASE_FLT_FN (BUILT_IN_LROUND):
15507 CASE_FLT_FN (BUILT_IN_MODF):
15508 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15509 CASE_FLT_FN (BUILT_IN_RINT):
15510 CASE_FLT_FN (BUILT_IN_ROUND):
15511 CASE_FLT_FN (BUILT_IN_SCALB):
15512 CASE_FLT_FN (BUILT_IN_SCALBLN):
15513 CASE_FLT_FN (BUILT_IN_SCALBN):
15514 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15515 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15516 CASE_FLT_FN (BUILT_IN_SINH):
15517 CASE_FLT_FN (BUILT_IN_TANH):
15518 CASE_FLT_FN (BUILT_IN_TRUNC):
15519 /* True if the 1st argument is nonnegative. */
15520 return tree_expr_nonnegative_warnv_p (arg0,
15521 strict_overflow_p);
15523 CASE_FLT_FN (BUILT_IN_FMAX):
15524 /* True if the 1st OR 2nd arguments are nonnegative. */
15525 return (tree_expr_nonnegative_warnv_p (arg0,
15526 strict_overflow_p)
15527 || (tree_expr_nonnegative_warnv_p (arg1,
15528 strict_overflow_p)));
15530 CASE_FLT_FN (BUILT_IN_FMIN):
15531 /* True if the 1st AND 2nd arguments are nonnegative. */
15532 return (tree_expr_nonnegative_warnv_p (arg0,
15533 strict_overflow_p)
15534 && (tree_expr_nonnegative_warnv_p (arg1,
15535 strict_overflow_p)));
15537 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15538 /* True if the 2nd argument is nonnegative. */
15539 return tree_expr_nonnegative_warnv_p (arg1,
15540 strict_overflow_p);
15542 CASE_FLT_FN (BUILT_IN_POWI):
15543 /* True if the 1st argument is nonnegative or the second
15544 argument is an even integer. */
15545 if (TREE_CODE (arg1) == INTEGER_CST
15546 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15547 return true;
15548 return tree_expr_nonnegative_warnv_p (arg0,
15549 strict_overflow_p);
15551 CASE_FLT_FN (BUILT_IN_POW):
15552 /* True if the 1st argument is nonnegative or the second
15553 argument is an even integer valued real. */
15554 if (TREE_CODE (arg1) == REAL_CST)
15556 REAL_VALUE_TYPE c;
15557 HOST_WIDE_INT n;
15559 c = TREE_REAL_CST (arg1);
15560 n = real_to_integer (&c);
15561 if ((n & 1) == 0)
15563 REAL_VALUE_TYPE cint;
15564 real_from_integer (&cint, VOIDmode, n,
15565 n < 0 ? -1 : 0, 0);
15566 if (real_identical (&c, &cint))
15567 return true;
15570 return tree_expr_nonnegative_warnv_p (arg0,
15571 strict_overflow_p);
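/* E.g. (added example): pow (x, 2.0) is known non-negative for any x
   because 2.0 round-trips through real_to_integer/real_from_integer
   as the even integer 2; pow (x, 3.0) instead falls back to asking
   whether x itself is non-negative.  */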
15573 default:
15574 break;
15576 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15577 type);
15580 /* Return true if T is known to be non-negative. If the return
15581 value is based on the assumption that signed overflow is undefined,
15582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15583 *STRICT_OVERFLOW_P. */
15585 bool
15586 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15588 enum tree_code code = TREE_CODE (t);
15589 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15590 return true;
15592 switch (code)
15594 case TARGET_EXPR:
15596 tree temp = TARGET_EXPR_SLOT (t);
15597 t = TARGET_EXPR_INITIAL (t);
15599 /* If the initializer is non-void, then it's a normal expression
15600 that will be assigned to the slot. */
15601 if (!VOID_TYPE_P (t))
15602 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15604 /* Otherwise, the initializer sets the slot in some way. One common
15605 way is an assignment statement at the end of the initializer. */
15606 while (1)
15608 if (TREE_CODE (t) == BIND_EXPR)
15609 t = expr_last (BIND_EXPR_BODY (t));
15610 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15611 || TREE_CODE (t) == TRY_CATCH_EXPR)
15612 t = expr_last (TREE_OPERAND (t, 0));
15613 else if (TREE_CODE (t) == STATEMENT_LIST)
15614 t = expr_last (t);
15615 else
15616 break;
15618 if (TREE_CODE (t) == MODIFY_EXPR
15619 && TREE_OPERAND (t, 0) == temp)
15620 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15621 strict_overflow_p);
15623 return false;
15626 case CALL_EXPR:
15628 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15629 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15631 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15632 get_callee_fndecl (t),
15633 arg0,
15634 arg1,
15635 strict_overflow_p);
15637 case COMPOUND_EXPR:
15638 case MODIFY_EXPR:
15639 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15640 strict_overflow_p);
15641 case BIND_EXPR:
15642 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15643 strict_overflow_p);
15644 case SAVE_EXPR:
15645 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15646 strict_overflow_p);
15648 default:
15649 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15650 TREE_TYPE (t));
15653 /* We don't know the sign of `t', so be conservative and return false. */
15654 return false;
15657 /* Return true if T is known to be non-negative. If the return
15658 value is based on the assumption that signed overflow is undefined,
15659 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15660 *STRICT_OVERFLOW_P. */
15662 bool
15663 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15665 enum tree_code code;
15666 if (t == error_mark_node)
15667 return false;
15669 code = TREE_CODE (t);
15670 switch (TREE_CODE_CLASS (code))
15672 case tcc_binary:
15673 case tcc_comparison:
15674 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15675 TREE_TYPE (t),
15676 TREE_OPERAND (t, 0),
15677 TREE_OPERAND (t, 1),
15678 strict_overflow_p);
15680 case tcc_unary:
15681 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15682 TREE_TYPE (t),
15683 TREE_OPERAND (t, 0),
15684 strict_overflow_p);
15686 case tcc_constant:
15687 case tcc_declaration:
15688 case tcc_reference:
15689 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15691 default:
15692 break;
15695 switch (code)
15697 case TRUTH_AND_EXPR:
15698 case TRUTH_OR_EXPR:
15699 case TRUTH_XOR_EXPR:
15700 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15701 TREE_TYPE (t),
15702 TREE_OPERAND (t, 0),
15703 TREE_OPERAND (t, 1),
15704 strict_overflow_p);
15705 case TRUTH_NOT_EXPR:
15706 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15707 TREE_TYPE (t),
15708 TREE_OPERAND (t, 0),
15709 strict_overflow_p);
15711 case COND_EXPR:
15712 case CONSTRUCTOR:
15713 case OBJ_TYPE_REF:
15714 case ASSERT_EXPR:
15715 case ADDR_EXPR:
15716 case WITH_SIZE_EXPR:
15717 case SSA_NAME:
15718 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15720 default:
15721 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15725 /* Return true if `t' is known to be non-negative. Handle warnings
15726 about undefined signed overflow. */
15728 bool
15729 tree_expr_nonnegative_p (tree t)
15731 bool ret, strict_overflow_p;
15733 strict_overflow_p = false;
15734 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15735 if (strict_overflow_p)
15736 fold_overflow_warning (("assuming signed overflow does not occur when "
15737 "determining that expression is always "
15738 "non-negative"),
15739 WARN_STRICT_OVERFLOW_MISC);
15740 return ret;
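/* A minimal usage sketch (illustrative; ARG0 and the surrounding
   folding context are assumed, not taken from this file). A caller
   folding ABS_EXPR can drop the operation entirely when its operand
   is provably non-negative:

     if (tree_expr_nonnegative_p (arg0))
       return arg0;

   An INTEGER_CST such as build_int_cst (integer_type_node, 5) is
   classified as tcc_constant above and is accepted by
   tree_single_nonnegative_warnv_p. */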
15744 /* Return true when the unary expression (CODE OP0) is known to be
15745 nonzero. Similar logic is present in nonzero_address in
15746 rtlanal.c.
15748 If the return value is based on the assumption that signed overflow
15749 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15750 change *STRICT_OVERFLOW_P. */
15752 bool
15753 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15754 bool *strict_overflow_p)
15756 switch (code)
15758 case ABS_EXPR:
15759 return tree_expr_nonzero_warnv_p (op0,
15760 strict_overflow_p);
15762 case NOP_EXPR:
15764 tree inner_type = TREE_TYPE (op0);
15765 tree outer_type = type;
15767 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15768 && tree_expr_nonzero_warnv_p (op0,
15769 strict_overflow_p));
15771 break;
15773 case NON_LVALUE_EXPR:
15774 return tree_expr_nonzero_warnv_p (op0,
15775 strict_overflow_p);
15777 default:
15778 break;
15781 return false;
15784 /* Return true when the binary expression (CODE OP0 OP1) is known
15785 to be nonzero. Similar logic is present in nonzero_address in
15786 rtlanal.c.
15788 If the return value is based on the assumption that signed overflow
15789 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15790 change *STRICT_OVERFLOW_P. */
15792 bool
15793 tree_binary_nonzero_warnv_p (enum tree_code code,
15794 tree type,
15795 tree op0,
15796 tree op1, bool *strict_overflow_p)
15798 bool sub_strict_overflow_p;
15799 switch (code)
15801 case POINTER_PLUS_EXPR:
15802 case PLUS_EXPR:
15803 if (TYPE_OVERFLOW_UNDEFINED (type))
15805 /* In the presence of negative values it is hard to say
15806 anything definite. */
15807 sub_strict_overflow_p = false;
15808 if (!tree_expr_nonnegative_warnv_p (op0,
15809 &sub_strict_overflow_p)
15810 || !tree_expr_nonnegative_warnv_p (op1,
15811 &sub_strict_overflow_p))
15812 return false;
15813 /* One of the operands must be positive and the other non-negative. */
15814 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15815 overflows, on a two's-complement machine the sum of two
15816 nonnegative numbers can never be zero. */
15817 return (tree_expr_nonzero_warnv_p (op0,
15818 strict_overflow_p)
15819 || tree_expr_nonzero_warnv_p (op1,
15820 strict_overflow_p));
15822 break;
15824 case MULT_EXPR:
15825 if (TYPE_OVERFLOW_UNDEFINED (type))
15827 if (tree_expr_nonzero_warnv_p (op0,
15828 strict_overflow_p)
15829 && tree_expr_nonzero_warnv_p (op1,
15830 strict_overflow_p))
15832 *strict_overflow_p = true;
15833 return true;
15836 break;
15838 case MIN_EXPR:
15839 sub_strict_overflow_p = false;
15840 if (tree_expr_nonzero_warnv_p (op0,
15841 &sub_strict_overflow_p)
15842 && tree_expr_nonzero_warnv_p (op1,
15843 &sub_strict_overflow_p))
15845 if (sub_strict_overflow_p)
15846 *strict_overflow_p = true;
15848 break;
15850 case MAX_EXPR:
15851 sub_strict_overflow_p = false;
15852 if (tree_expr_nonzero_warnv_p (op0,
15853 &sub_strict_overflow_p))
15855 if (sub_strict_overflow_p)
15856 *strict_overflow_p = true;
15858 /* When both operands are nonzero, MAX must be too. */
15859 if (tree_expr_nonzero_warnv_p (op1,
15860 strict_overflow_p))
15861 return true;
15863 /* MAX where operand 0 is positive is positive. */
15864 return tree_expr_nonnegative_warnv_p (op0,
15865 strict_overflow_p);
15867 /* MAX where operand 1 is positive is positive. */
15868 else if (tree_expr_nonzero_warnv_p (op1,
15869 &sub_strict_overflow_p)
15870 && tree_expr_nonnegative_warnv_p (op1,
15871 &sub_strict_overflow_p))
15873 if (sub_strict_overflow_p)
15874 *strict_overflow_p = true;
15875 return true;
15877 break;
15879 case BIT_IOR_EXPR:
15880 return (tree_expr_nonzero_warnv_p (op1,
15881 strict_overflow_p)
15882 || tree_expr_nonzero_warnv_p (op0,
15883 strict_overflow_p));
15885 default:
15886 break;
15889 return false;
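/* Worked example for the PLUS_EXPR case above (A and B are
   hypothetical signed operands). When TYPE_OVERFLOW_UNDEFINED holds
   and A and B are both known non-negative with at least one of them
   known nonzero, A + B lies in [1, 2*INT_MAX]; that range cannot wrap
   to zero in two's-complement arithmetic, which is why
   *STRICT_OVERFLOW_P is left untouched for the final test. */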
15892 /* Return true when T (a constant, an address or a COND_EXPR) is
15893 known to be nonzero. Similar logic is present in nonzero_address
15894 in rtlanal.c.
15896 If the return value is based on the assumption that signed overflow
15897 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15898 change *STRICT_OVERFLOW_P. */
15900 bool
15901 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15903 bool sub_strict_overflow_p;
15904 switch (TREE_CODE (t))
15906 case INTEGER_CST:
15907 return !integer_zerop (t);
15909 case ADDR_EXPR:
15911 tree base = TREE_OPERAND (t, 0);
15912 if (!DECL_P (base))
15913 base = get_base_address (base);
15915 if (!base)
15916 return false;
15918 /* Weak declarations may link to NULL. Other things may also be NULL,
15919 so protect with -fdelete-null-pointer-checks; but variables
15920 allocated on the stack can never be NULL. */
15921 if (DECL_P (base)
15922 && (flag_delete_null_pointer_checks
15923 || (DECL_CONTEXT (base)
15924 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15925 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15926 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15928 /* Constants are never weak. */
15929 if (CONSTANT_CLASS_P (base))
15930 return true;
15932 return false;
15935 case COND_EXPR:
15936 sub_strict_overflow_p = false;
15937 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15938 &sub_strict_overflow_p)
15939 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15940 &sub_strict_overflow_p))
15942 if (sub_strict_overflow_p)
15943 *strict_overflow_p = true;
15944 return true;
15946 break;
15948 default:
15949 break;
15951 return false;
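/* Illustrative cases for the ADDR_EXPR logic above (variable names
   are hypothetical). For an ordinary global `int g', &g is nonzero
   provided -fdelete-null-pointer-checks is in effect. For a weak
   symbol `int w __attribute__ ((weak))', &w may legitimately resolve
   to NULL, so the DECL_WEAK test makes us return false. For a local
   variable of the enclosing function, auto_var_in_fn_p holds and the
   address is nonzero regardless of the flag. */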
15954 /* Return true when T is known to be nonzero. Similar logic is
15955 present in nonzero_address in rtlanal.c.
15958 If the return value is based on the assumption that signed overflow
15959 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15960 change *STRICT_OVERFLOW_P. */
15962 bool
15963 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15965 tree type = TREE_TYPE (t);
15966 enum tree_code code;
15968 /* Doing something useful for floating point would need more work. */
15969 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15970 return false;
15972 code = TREE_CODE (t);
15973 switch (TREE_CODE_CLASS (code))
15975 case tcc_unary:
15976 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15977 strict_overflow_p);
15978 case tcc_binary:
15979 case tcc_comparison:
15980 return tree_binary_nonzero_warnv_p (code, type,
15981 TREE_OPERAND (t, 0),
15982 TREE_OPERAND (t, 1),
15983 strict_overflow_p);
15984 case tcc_constant:
15985 case tcc_declaration:
15986 case tcc_reference:
15987 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15989 default:
15990 break;
15993 switch (code)
15995 case TRUTH_NOT_EXPR:
15996 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15997 strict_overflow_p);
15999 case TRUTH_AND_EXPR:
16000 case TRUTH_OR_EXPR:
16001 case TRUTH_XOR_EXPR:
16002 return tree_binary_nonzero_warnv_p (code, type,
16003 TREE_OPERAND (t, 0),
16004 TREE_OPERAND (t, 1),
16005 strict_overflow_p);
16007 case COND_EXPR:
16008 case CONSTRUCTOR:
16009 case OBJ_TYPE_REF:
16010 case ASSERT_EXPR:
16011 case ADDR_EXPR:
16012 case WITH_SIZE_EXPR:
16013 case SSA_NAME:
16014 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16016 case COMPOUND_EXPR:
16017 case MODIFY_EXPR:
16018 case BIND_EXPR:
16019 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16020 strict_overflow_p);
16022 case SAVE_EXPR:
16023 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16024 strict_overflow_p);
16026 case CALL_EXPR:
16027 return alloca_call_p (t);
16029 default:
16030 break;
16032 return false;
16035 /* Return true when T is known to be nonzero. Handle warnings
16036 about undefined signed overflow. */
16038 bool
16039 tree_expr_nonzero_p (tree t)
16041 bool ret, strict_overflow_p;
16043 strict_overflow_p = false;
16044 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16045 if (strict_overflow_p)
16046 fold_overflow_warning (("assuming signed overflow does not occur when "
16047 "determining that expression is always "
16048 "non-zero"),
16049 WARN_STRICT_OVERFLOW_MISC);
16050 return ret;
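/* A sketch of a typical caller (modeled loosely on the comparison
   folding in this file; not verbatim). An equality test of a
   provably nonzero value against zero folds to a constant:

     if (integer_zerop (arg1) && tree_expr_nonzero_p (arg0))
       return constant_boolean_node (code == NE_EXPR, type);

   Any warning about reliance on undefined signed overflow has
   already been emitted by tree_expr_nonzero_p itself. */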
16053 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16054 attempt to fold the expression to a constant without modifying TYPE,
16055 OP0 or OP1.
16057 If the expression could be simplified to a constant, then return
16058 the constant. If the expression would not be simplified to a
16059 constant, then return NULL_TREE. */
16061 tree
16062 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16064 tree tem = fold_binary (code, type, op0, op1);
16065 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
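/* Usage sketch (the constants are illustrative):

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree c = fold_binary_to_constant (MULT_EXPR, integer_type_node,
                                       a, b);

   yields the INTEGER_CST 42; if B were a VAR_DECL instead,
   fold_binary would not produce a TREE_CONSTANT result and NULL_TREE
   would be returned. */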
16068 /* Given the components of a unary expression CODE, TYPE and OP0,
16069 attempt to fold the expression to a constant without modifying
16070 TYPE or OP0.
16072 If the expression could be simplified to a constant, then return
16073 the constant. If the expression would not be simplified to a
16074 constant, then return NULL_TREE. */
16076 tree
16077 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16079 tree tem = fold_unary (code, type, op0);
16080 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16083 /* If EXP represents referencing an element in a constant string
16084 (either via pointer arithmetic or array indexing), return the
16085 tree representing the value accessed, otherwise return NULL. */
16087 tree
16088 fold_read_from_constant_string (tree exp)
16090 if ((TREE_CODE (exp) == INDIRECT_REF
16091 || TREE_CODE (exp) == ARRAY_REF)
16092 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16094 tree exp1 = TREE_OPERAND (exp, 0);
16095 tree index;
16096 tree string;
16097 location_t loc = EXPR_LOCATION (exp);
16099 if (TREE_CODE (exp) == INDIRECT_REF)
16100 string = string_constant (exp1, &index);
16101 else
16103 tree low_bound = array_ref_low_bound (exp);
16104 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16106 /* Optimize the special case of a zero lower bound.
16108 We convert the low_bound to sizetype to avoid some problems
16109 with constant folding. (E.g. suppose the lower bound is 1,
16110 and its mode is QI. Without the conversion, (ARRAY
16111 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16112 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16113 if (! integer_zerop (low_bound))
16114 index = size_diffop_loc (loc, index,
16115 fold_convert_loc (loc, sizetype, low_bound));
16117 string = exp1;
16120 if (string
16121 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16122 && TREE_CODE (string) == STRING_CST
16123 && TREE_CODE (index) == INTEGER_CST
16124 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16125 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16126 == MODE_INT)
16127 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16128 return build_int_cst_type (TREE_TYPE (exp),
16129 (TREE_STRING_POINTER (string)
16130 [TREE_INT_CST_LOW (index)]));
16132 return NULL;
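/* Worked example (hypothetical trees). For EXP representing
   "abc"[1] -- an ARRAY_REF whose first operand is the STRING_CST
   "abc" and whose second operand is the INTEGER_CST 1 -- the guards
   above all hold (single-byte integer mode, index below
   TREE_STRING_LENGTH), so the result is
   build_int_cst_type (TREE_TYPE (exp), 'b'). */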
16135 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16136 an integer constant, real, or fixed-point constant.
16138 TYPE is the type of the result. */
16140 static tree
16141 fold_negate_const (tree arg0, tree type)
16143 tree t = NULL_TREE;
16145 switch (TREE_CODE (arg0))
16147 case INTEGER_CST:
16149 double_int val = tree_to_double_int (arg0);
16150 bool overflow;
16151 val = val.neg_with_overflow (&overflow);
16152 t = force_fit_type_double (type, val, 1,
16153 (overflow | TREE_OVERFLOW (arg0))
16154 && !TYPE_UNSIGNED (type));
16155 break;
16158 case REAL_CST:
16159 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16160 break;
16162 case FIXED_CST:
16164 FIXED_VALUE_TYPE f;
16165 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16166 &(TREE_FIXED_CST (arg0)), NULL,
16167 TYPE_SATURATING (type));
16168 t = build_fixed (type, f);
16169 /* Propagate overflow flags. */
16170 if (overflow_p | TREE_OVERFLOW (arg0))
16171 TREE_OVERFLOW (t) = 1;
16172 break;
16175 default:
16176 gcc_unreachable ();
16179 return t;
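/* Overflow example for the INTEGER_CST case (assuming a 32-bit
   signed `int'). Negating INT_MIN (-2147483648) wraps back to
   INT_MIN; neg_with_overflow reports this, force_fit_type_double is
   told the value overflowed, and the resulting INTEGER_CST carries
   TREE_OVERFLOW. For unsigned types the !TYPE_UNSIGNED (type) test
   deliberately discards the overflow indication. */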
16182 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16183 an integer constant or real constant.
16185 TYPE is the type of the result. */
16187 tree
16188 fold_abs_const (tree arg0, tree type)
16190 tree t = NULL_TREE;
16192 switch (TREE_CODE (arg0))
16194 case INTEGER_CST:
16196 double_int val = tree_to_double_int (arg0);
16198 /* If the value is unsigned or non-negative, then the absolute value
16199 is the same as the ordinary value. */
16200 if (TYPE_UNSIGNED (type)
16201 || !val.is_negative ())
16202 t = arg0;
16204 /* If the value is negative, then the absolute value is
16205 its negation. */
16206 else
16208 bool overflow;
16209 val = val.neg_with_overflow (&overflow);
16210 t = force_fit_type_double (type, val, -1,
16211 overflow | TREE_OVERFLOW (arg0));
16214 break;
16216 case REAL_CST:
16217 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16218 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16219 else
16220 t = arg0;
16221 break;
16223 default:
16224 gcc_unreachable ();
16227 return t;
16230 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16231 constant. TYPE is the type of the result. */
16233 static tree
16234 fold_not_const (const_tree arg0, tree type)
16236 double_int val;
16238 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16240 val = ~tree_to_double_int (arg0);
16241 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
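/* Worked example (illustrative). For ARG0 the INTEGER_CST 5 in a
   signed 32-bit type, inverting every bit of 5 gives -6 in two's
   complement, and force_fit_type_double rebuilds that value in TYPE;
   no new overflow is possible, hence the overflowable argument of
   0. */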
16244 /* Given CODE, a relational operator, the target type, TYPE and two
16245 constant operands OP0 and OP1, return the result of the
16246 relational operation. If the result is not a compile time
16247 constant, then return NULL_TREE. */
16249 static tree
16250 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16252 int result, invert;
16254 /* From here on, the only cases we handle are when the result is
16255 known to be a constant. */
16257 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16259 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16260 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16262 /* Handle the cases where either operand is a NaN. */
16263 if (real_isnan (c0) || real_isnan (c1))
16265 switch (code)
16267 case EQ_EXPR:
16268 case ORDERED_EXPR:
16269 result = 0;
16270 break;
16272 case NE_EXPR:
16273 case UNORDERED_EXPR:
16274 case UNLT_EXPR:
16275 case UNLE_EXPR:
16276 case UNGT_EXPR:
16277 case UNGE_EXPR:
16278 case UNEQ_EXPR:
16279 result = 1;
16280 break;
16282 case LT_EXPR:
16283 case LE_EXPR:
16284 case GT_EXPR:
16285 case GE_EXPR:
16286 case LTGT_EXPR:
16287 if (flag_trapping_math)
16288 return NULL_TREE;
16289 result = 0;
16290 break;
16292 default:
16293 gcc_unreachable ();
16296 return constant_boolean_node (result, type);
16299 return constant_boolean_node (real_compare (code, c0, c1), type);
16302 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16304 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16305 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16306 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16309 /* Handle equality/inequality of complex constants. */
16310 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16312 tree rcond = fold_relational_const (code, type,
16313 TREE_REALPART (op0),
16314 TREE_REALPART (op1));
16315 tree icond = fold_relational_const (code, type,
16316 TREE_IMAGPART (op0),
16317 TREE_IMAGPART (op1));
16318 if (code == EQ_EXPR)
16319 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16320 else if (code == NE_EXPR)
16321 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16322 else
16323 return NULL_TREE;
16326 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16328 unsigned count = VECTOR_CST_NELTS (op0);
16329 tree *elts = XALLOCAVEC (tree, count);
16330 gcc_assert (VECTOR_CST_NELTS (op1) == count
16331 && TYPE_VECTOR_SUBPARTS (type) == count);
16333 for (unsigned i = 0; i < count; i++)
16335 tree elem_type = TREE_TYPE (type);
16336 tree elem0 = VECTOR_CST_ELT (op0, i);
16337 tree elem1 = VECTOR_CST_ELT (op1, i);
16339 tree tem = fold_relational_const (code, elem_type,
16340 elem0, elem1);
16342 if (tem == NULL_TREE)
16343 return NULL_TREE;
16345 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16348 return build_vector (type, elts);
16351 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16353 To compute GT, swap the arguments and do LT.
16354 To compute GE, do LT and invert the result.
16355 To compute LE, swap the arguments, do LT and invert the result.
16356 To compute NE, do EQ and invert the result.
16358 Therefore, the code below must handle only EQ and LT. */
16360 if (code == LE_EXPR || code == GT_EXPR)
16362 tree tem = op0;
16363 op0 = op1;
16364 op1 = tem;
16365 code = swap_tree_comparison (code);
16368 /* Note that it is safe to invert for real values here because we
16369 have already handled the one case where it matters. */
16371 invert = 0;
16372 if (code == NE_EXPR || code == GE_EXPR)
16374 invert = 1;
16375 code = invert_tree_comparison (code, false);
16378 /* Compute a result for LT or EQ if the arguments permit;
16379 otherwise return NULL_TREE. */
16380 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16382 if (code == EQ_EXPR)
16383 result = tree_int_cst_equal (op0, op1);
16384 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16385 result = INT_CST_LT_UNSIGNED (op0, op1);
16386 else
16387 result = INT_CST_LT (op0, op1);
16389 else
16390 return NULL_TREE;
16392 if (invert)
16393 result ^= 1;
16394 return constant_boolean_node (result, type);
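/* Worked example of the canonicalization above (the constants are
   illustrative). Folding 2 > 3 on INTEGER_CSTs first swaps the
   operands and turns GT_EXPR into LT_EXPR, so INT_CST_LT evaluates
   3 < 2 to 0 and the result is constant_boolean_node (0, type).
   GE_EXPR is instead rewritten as LT_EXPR with INVERT set, flipping
   the result afterwards. */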
16397 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16398 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16399 itself. */
16401 tree
16402 fold_build_cleanup_point_expr (tree type, tree expr)
16404 /* If the expression does not have side effects then we don't have to wrap
16405 it with a cleanup point expression. */
16406 if (!TREE_SIDE_EFFECTS (expr))
16407 return expr;
16409 /* If the expression is a return, check whether the expression inside
16410 the return, or the right-hand side of the modify expression inside
16411 the return, has no side effects. If either has none, we don't need to
16412 wrap the expression in a cleanup point expression. Note we don't check
16413 the left-hand side of the modify because it should always be a return decl. */
16414 if (TREE_CODE (expr) == RETURN_EXPR)
16416 tree op = TREE_OPERAND (expr, 0);
16417 if (!op || !TREE_SIDE_EFFECTS (op))
16418 return expr;
16419 op = TREE_OPERAND (op, 1);
16420 if (!TREE_SIDE_EFFECTS (op))
16421 return expr;
16424 return build1 (CLEANUP_POINT_EXPR, type, expr);
16427 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16428 of an indirection through OP0, or NULL_TREE if no simplification is
16429 possible. */
16431 tree
16432 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16434 tree sub = op0;
16435 tree subtype;
16437 STRIP_NOPS (sub);
16438 subtype = TREE_TYPE (sub);
16439 if (!POINTER_TYPE_P (subtype))
16440 return NULL_TREE;
16442 if (TREE_CODE (sub) == ADDR_EXPR)
16444 tree op = TREE_OPERAND (sub, 0);
16445 tree optype = TREE_TYPE (op);
16446 /* *&CONST_DECL -> the value of the const decl. */
16447 if (TREE_CODE (op) == CONST_DECL)
16448 return DECL_INITIAL (op);
16449 /* *&p => p; make sure to handle *&"str"[cst] here. */
16450 if (type == optype)
16452 tree fop = fold_read_from_constant_string (op);
16453 if (fop)
16454 return fop;
16455 else
16456 return op;
16458 /* *(foo *)&fooarray => fooarray[0] */
16459 else if (TREE_CODE (optype) == ARRAY_TYPE
16460 && type == TREE_TYPE (optype)
16461 && (!in_gimple_form
16462 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16464 tree type_domain = TYPE_DOMAIN (optype);
16465 tree min_val = size_zero_node;
16466 if (type_domain && TYPE_MIN_VALUE (type_domain))
16467 min_val = TYPE_MIN_VALUE (type_domain);
16468 if (in_gimple_form
16469 && TREE_CODE (min_val) != INTEGER_CST)
16470 return NULL_TREE;
16471 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16472 NULL_TREE, NULL_TREE);
16474 /* *(foo *)&complexfoo => __real__ complexfoo */
16475 else if (TREE_CODE (optype) == COMPLEX_TYPE
16476 && type == TREE_TYPE (optype))
16477 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16478 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16479 else if (TREE_CODE (optype) == VECTOR_TYPE
16480 && type == TREE_TYPE (optype))
16482 tree part_width = TYPE_SIZE (type);
16483 tree index = bitsize_int (0);
16484 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16488 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16489 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16491 tree op00 = TREE_OPERAND (sub, 0);
16492 tree op01 = TREE_OPERAND (sub, 1);
16494 STRIP_NOPS (op00);
16495 if (TREE_CODE (op00) == ADDR_EXPR)
16497 tree op00type;
16498 op00 = TREE_OPERAND (op00, 0);
16499 op00type = TREE_TYPE (op00);
16501 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16502 if (TREE_CODE (op00type) == VECTOR_TYPE
16503 && type == TREE_TYPE (op00type))
16505 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16506 tree part_width = TYPE_SIZE (type);
16507 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16508 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16509 tree index = bitsize_int (indexi);
16511 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16512 return fold_build3_loc (loc,
16513 BIT_FIELD_REF, type, op00,
16514 part_width, index);
16517 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16518 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16519 && type == TREE_TYPE (op00type))
16521 tree size = TYPE_SIZE_UNIT (type);
16522 if (tree_int_cst_equal (size, op01))
16523 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16525 /* ((foo *)&fooarray)[1] => fooarray[1] */
16526 else if (TREE_CODE (op00type) == ARRAY_TYPE
16527 && type == TREE_TYPE (op00type))
16529 tree type_domain = TYPE_DOMAIN (op00type);
16530 tree min_val = size_zero_node;
16531 if (type_domain && TYPE_MIN_VALUE (type_domain))
16532 min_val = TYPE_MIN_VALUE (type_domain);
16533 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16534 TYPE_SIZE_UNIT (type));
16535 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16536 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16537 NULL_TREE, NULL_TREE);
16542 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16543 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16544 && type == TREE_TYPE (TREE_TYPE (subtype))
16545 && (!in_gimple_form
16546 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16548 tree type_domain;
16549 tree min_val = size_zero_node;
16550 sub = build_fold_indirect_ref_loc (loc, sub);
16551 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16552 if (type_domain && TYPE_MIN_VALUE (type_domain))
16553 min_val = TYPE_MIN_VALUE (type_domain);
16554 if (in_gimple_form
16555 && TREE_CODE (min_val) != INTEGER_CST)
16556 return NULL_TREE;
16557 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16558 NULL_TREE);
16561 return NULL_TREE;
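/* Illustrative foldings performed above (array and variable names
   are hypothetical):

     *(int *)&a      becomes  a[0]          for int a[4]
     *(float *)&c    becomes  __real__ c    for _Complex float c
     ((int *)&a)[1]  becomes  a[1]          via the POINTER_PLUS case

   Each transformation requires the pointed-to type to match the
   referenced type exactly, as the guards above insist. */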
16564 /* Builds an expression for an indirection through T, simplifying some
16565 cases. */
16567 tree
16568 build_fold_indirect_ref_loc (location_t loc, tree t)
16570 tree type = TREE_TYPE (TREE_TYPE (t));
16571 tree sub = fold_indirect_ref_1 (loc, type, t);
16573 if (sub)
16574 return sub;
16576 return build1_loc (loc, INDIRECT_REF, type, t);
16579 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16581 tree
16582 fold_indirect_ref_loc (location_t loc, tree t)
16584 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16586 if (sub)
16587 return sub;
16588 else
16589 return t;
16592 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16593 whose result is ignored. The type of the returned tree need not be
16594 the same as that of the original expression. */
16596 tree
16597 fold_ignored_result (tree t)
16599 if (!TREE_SIDE_EFFECTS (t))
16600 return integer_zero_node;
16602 for (;;)
16603 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16605 case tcc_unary:
16606 t = TREE_OPERAND (t, 0);
16607 break;
16609 case tcc_binary:
16610 case tcc_comparison:
16611 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16612 t = TREE_OPERAND (t, 0);
16613 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16614 t = TREE_OPERAND (t, 1);
16615 else
16616 return t;
16617 break;
16619 case tcc_expression:
16620 switch (TREE_CODE (t))
16622 case COMPOUND_EXPR:
16623 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16624 return t;
16625 t = TREE_OPERAND (t, 0);
16626 break;
16628 case COND_EXPR:
16629 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16630 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16631 return t;
16632 t = TREE_OPERAND (t, 0);
16633 break;
16635 default:
16636 return t;
16638 break;
16640 default:
16641 return t;
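/* Behavior sketch (X and Y are hypothetical trees). An ignored
   expression with no side effects at all becomes integer_zero_node.
   For X * (Y = 1), the tcc_binary case keeps only the side-effecting
   operand, so the final result is the MODIFY_EXPR Y = 1. */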
16645 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16646 This can only be applied to objects of a sizetype. */
16648 tree
16649 round_up_loc (location_t loc, tree value, int divisor)
16651 tree div = NULL_TREE;
16653 gcc_assert (divisor > 0);
16654 if (divisor == 1)
16655 return value;
16657 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16658 have to do anything. Only do this when VALUE is not a constant,
16659 because in that case the check is more expensive than just
16660 doing the rounding. */
16661 if (TREE_CODE (value) != INTEGER_CST)
16663 div = build_int_cst (TREE_TYPE (value), divisor);
16665 if (multiple_of_p (TREE_TYPE (value), value, div))
16666 return value;
16669 /* If divisor is a power of two, simplify this to bit manipulation. */
16670 if (divisor == (divisor & -divisor))
16672 if (TREE_CODE (value) == INTEGER_CST)
16674 double_int val = tree_to_double_int (value);
16675 bool overflow_p;
16677 if ((val.low & (divisor - 1)) == 0)
16678 return value;
16680 overflow_p = TREE_OVERFLOW (value);
16681 val.low &= ~(divisor - 1);
16682 val.low += divisor;
16683 if (val.low == 0)
16685 val.high++;
16686 if (val.high == 0)
16687 overflow_p = true;
16690 return force_fit_type_double (TREE_TYPE (value), val,
16691 -1, overflow_p);
16693 else
16695 tree t;
16697 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16698 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16699 t = build_int_cst (TREE_TYPE (value), -divisor);
16700 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16703 else
16705 if (!div)
16706 div = build_int_cst (TREE_TYPE (value), divisor);
16707 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16708 value = size_binop_loc (loc, MULT_EXPR, value, div);
16711 return value;
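/* Worked examples (the values are illustrative). round_up (13, 8)
   takes the INTEGER_CST path: 13 & 7 is nonzero, so the low bits are
   cleared and DIVISOR is added, giving 16. For a non-constant VALUE
   and a power-of-two DIVISOR the result is built as

     (VALUE + 7) & -8

   while a non-power-of-two DIVISOR falls back to a CEIL_DIV_EXPR
   followed by a MULT_EXPR. */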
16714 /* Likewise, but round down. */
16716 tree
16717 round_down_loc (location_t loc, tree value, int divisor)
16719 tree div = NULL_TREE;
16721 gcc_assert (divisor > 0);
16722 if (divisor == 1)
16723 return value;
16725 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16726 have to do anything. Only do this when VALUE is not a constant,
16727 because in that case the check is more expensive than just
16728 doing the rounding. */
16729 if (TREE_CODE (value) != INTEGER_CST)
16731 div = build_int_cst (TREE_TYPE (value), divisor);
16733 if (multiple_of_p (TREE_TYPE (value), value, div))
16734 return value;
16737 /* If divisor is a power of two, simplify this to bit manipulation. */
16738 if (divisor == (divisor & -divisor))
16740 tree t;
16742 t = build_int_cst (TREE_TYPE (value), -divisor);
16743 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16745 else
16747 if (!div)
16748 div = build_int_cst (TREE_TYPE (value), divisor);
16749 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16750 value = size_binop_loc (loc, MULT_EXPR, value, div);
16753 return value;
16756 /* Return a pointer to the base of the object addressed by EXP and
16757 extract the offset of the access, storing it in *PBITPOS and
16758 *POFFSET. */
16760 static tree
16761 split_address_to_core_and_offset (tree exp,
16762 HOST_WIDE_INT *pbitpos, tree *poffset)
16764 tree core;
16765 enum machine_mode mode;
16766 int unsignedp, volatilep;
16767 HOST_WIDE_INT bitsize;
16768 location_t loc = EXPR_LOCATION (exp);
16770 if (TREE_CODE (exp) == ADDR_EXPR)
16772 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16773 poffset, &mode, &unsignedp, &volatilep,
16774 false);
16775 core = build_fold_addr_expr_loc (loc, core);
16777 else
16779 core = exp;
16780 *pbitpos = 0;
16781 *poffset = NULL_TREE;
16784 return core;
16787 /* Returns true if addresses of E1 and E2 differ by a constant, false
16788 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16790 bool
16791 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16793 tree core1, core2;
16794 HOST_WIDE_INT bitpos1, bitpos2;
16795 tree toffset1, toffset2, tdiff, type;
16797 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16798 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16800 if (bitpos1 % BITS_PER_UNIT != 0
16801 || bitpos2 % BITS_PER_UNIT != 0
16802 || !operand_equal_p (core1, core2, 0))
16803 return false;
16805 if (toffset1 && toffset2)
16807 type = TREE_TYPE (toffset1);
16808 if (type != TREE_TYPE (toffset2))
16809 toffset2 = fold_convert (type, toffset2);
16811 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16812 if (!cst_and_fits_in_hwi (tdiff))
16813 return false;
16815 *diff = int_cst_value (tdiff);
16817 else if (toffset1 || toffset2)
16819 /* If only one of the offsets is non-constant, the difference cannot
16820 be a constant. */
16821 return false;
16823 else
16824 *diff = 0;
16826 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16827 return true;
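/* Worked example (a hypothetical array). For int a[10] with
   E1 == &a[3] and E2 == &a[1], both addresses share the core &a and
   both offsets are constant; on a target with 32-bit int the bit
   positions differ by 64, so *DIFF is set to 8 and true is returned.
   If either address involved a variable index, the function would
   return false instead. */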
16830 /* Simplify the floating point expression EXP when the sign of the
16831 result is not significant. Return NULL_TREE if no simplification
16832 is possible. */
16834 tree
16835 fold_strip_sign_ops (tree exp)
16837 tree arg0, arg1;
16838 location_t loc = EXPR_LOCATION (exp);
16840 switch (TREE_CODE (exp))
16842 case ABS_EXPR:
16843 case NEGATE_EXPR:
16844 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16845 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16847 case MULT_EXPR:
16848 case RDIV_EXPR:
16849 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16850 return NULL_TREE;
16851 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16852 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16853 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16854 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16855 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16856 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16857 break;
16859 case COMPOUND_EXPR:
16860 arg0 = TREE_OPERAND (exp, 0);
16861 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16862 if (arg1)
16863 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16864 break;
16866 case COND_EXPR:
16867 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16868 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16869 if (arg0 || arg1)
16870 return fold_build3_loc (loc,
16871 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16872 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16873 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16874 break;
16876 case CALL_EXPR:
16878 const enum built_in_function fcode = builtin_mathfn_code (exp);
16879 switch (fcode)
16881 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16882 /* Strip copysign function call, return the 1st argument. */
16883 arg0 = CALL_EXPR_ARG (exp, 0);
16884 arg1 = CALL_EXPR_ARG (exp, 1);
16885 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16887 default:
16888 /* Strip sign ops from the argument of "odd" math functions. */
16889 if (negate_mathfn_p (fcode))
16891 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16892 if (arg0)
16893 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16895 break;
16898 break;
16900 default:
16901 break;
16903 return NULL_TREE;
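/* Illustrative simplification (X and Y are hypothetical). When only
   the magnitude of the result matters -- for instance while folding
   the argument of fabs -- the MULT_EXPR case above rewrites
   (-x) * ABS_EXPR <y> as x * y, provided the mode does not honor
   sign-dependent rounding. */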