gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
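/* Illustrative sketch, not part of the original file: a typical caller
   builds size expressions with size_int and combines them with
   size_binop, e.g. the byte offset of element 3 in an array of 8-byte
   elements:

     tree off = size_binop (MULT_EXPR, size_int (3), size_int (8));

   Both operands carry sizetype, so this folds to the sizetype
   constant 24.  */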
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-ssa.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
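/* Illustrative sketch, not part of the original file: a caller that
   wants to rewrite (x * 12) / 4 can ask whether 4 divides 12 exactly;
   with c12 and c4 standing for the INTEGER_CSTs 12 and 4:

     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, c12, c4);

   q is then the INTEGER_CST 3.  If the dividend were 14 instead, the
   remainder would be nonzero and NULL_TREE would be returned.  */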
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
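/* Illustrative sketch, not part of the original file: code that folds
   speculatively brackets the work in a defer/undefer pair and only
   releases the warning when the folded result is really used:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ...
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   res_is_used and stmt stand for caller state; passing false as ISSUE
   silently drops any deferred -Wstrict-overflow warning.  */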
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
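/* Illustrative note, not part of the original file: oddness is the
   property that licenses folding -f(x) into f(-x); for example
   -sin (x) == sin (-x), and cbrt (-8.0) == -cbrt (8.0) == -2.0.
   cos is even rather than odd, which is why it is absent from the
   list above.  */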
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
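/* Illustrative note, not part of the original file: in two's complement
   the only signed value whose negation overflows is the most negative
   one, which has just its sign bit set.  For 8-bit precision that is
   0x80 = -128, which the comparison above rejects, while 0x7f = 127
   and every other value negate safely.  */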
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
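/* Illustrative note, not part of the original file: splitting
   IN = x - 3 with CODE = PLUS_EXPR returns the variable part x and
   sets *MINUS_LITP to 3 (the literal was subtracted); with NEGATE_P
   nonzero the literal moves back to *LITP and the returned variable
   part becomes -x, i.e. the decomposition of -(x - 3) = -x + 3.  */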
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
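/* Illustrative note, not part of the original file: for PLUS_EXPR two
   unsigned types of equal precision and mode match, while sizetype
   and ssizetype do not, since their signedness differs; for the shift
   and rotate codes the second operand's type never needs to match.  */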
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
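/* Illustrative sketch, not part of the original file:

     tree sum = int_const_binop (PLUS_EXPR,
                                 build_int_cst (integer_type_node, 2),
                                 build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5, while a code the switch above does not
   handle, or a division by zero, yields NULL_TREE.  */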
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
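/* Illustrative note, not part of the original file: for the wide
   (flag_complex_method != 0) division above, consider (1 + 2i) / (3 + 4i),
   carrying the arithmetic exactly.  |3| < |4|, so the TRUE branch runs:
   ratio = 3/4, div = 4 + 3*(3/4) = 25/4, tr = (1*(3/4) + 2) / div = 11/25,
   ti = (2*(3/4) - 1) / div = 2/25, which matches
   (1 + 2i) * (3 - 4i) / 25 = (11 + 2i) / 25.  */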
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
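/* Illustrative note, not part of the original file: with sizetype
   constants ARG0 = 4 and ARG1 = 12, the result is the ssizetype
   constant -8, computed as -(12 - 4) so that no intermediate
   subtraction wraps in the unsigned sizetype.  */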
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
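/* Illustrative note, not part of the original file: converting the
   REAL_CST 1.0e10 to a 32-bit signed integer type overflows, so the
   result saturates to the type's maximum 2147483647 and TREE_OVERFLOW
   is set; a NaN converts to 0, again with TREE_OVERFLOW set.  */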
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2065 arg = save_expr (arg);
2066 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2067 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2068 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2073 default:
2074 gcc_unreachable ();
2077 case VECTOR_TYPE:
2078 if (integer_zerop (arg))
2079 return build_zero_vector (type);
2080 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2082 || TREE_CODE (orig) == VECTOR_TYPE);
2083 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2085 case VOID_TYPE:
2086 tem = fold_ignored_result (arg);
2087 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2089 default:
2090 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2091 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2092 gcc_unreachable ();
2094 fold_convert_exit:
2095 protected_set_expr_location_unshare (tem, loc);
2096 return tem;
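/* A minimal usage sketch (editor's illustration; LOC stands for any
   valid location_t):

     tree d = fold_convert_loc (loc, double_type_node,
                                build_int_cst (integer_type_node, 2));
     tree c = fold_convert_loc (loc, complex_double_type_node, d);

   The first call folds to the REAL_CST 2.0 through fold_convert_const;
   the second goes through the COMPLEX_TYPE case and pairs 2.0 with a
   converted zero imaginary part.  */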
2099 /* Return false if expr can be assumed not to be an lvalue, true
2100 otherwise. */
2102 static bool
2103 maybe_lvalue_p (const_tree x)
2105 /* We only need to wrap lvalue tree codes. */
2106 switch (TREE_CODE (x))
2108 case VAR_DECL:
2109 case PARM_DECL:
2110 case RESULT_DECL:
2111 case LABEL_DECL:
2112 case FUNCTION_DECL:
2113 case SSA_NAME:
2115 case COMPONENT_REF:
2116 case MEM_REF:
2117 case INDIRECT_REF:
2118 case ARRAY_REF:
2119 case ARRAY_RANGE_REF:
2120 case BIT_FIELD_REF:
2121 case OBJ_TYPE_REF:
2123 case REALPART_EXPR:
2124 case IMAGPART_EXPR:
2125 case PREINCREMENT_EXPR:
2126 case PREDECREMENT_EXPR:
2127 case SAVE_EXPR:
2128 case TRY_CATCH_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case COMPOUND_EXPR:
2131 case MODIFY_EXPR:
2132 case TARGET_EXPR:
2133 case COND_EXPR:
2134 case BIND_EXPR:
2135 break;
2137 default:
2138 /* Assume the worst for front-end tree codes. */
2139 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 break;
2141 return false;
2144 return true;
2147 /* Return an expr equal to X but certainly not valid as an lvalue. */
2149 tree
2150 non_lvalue_loc (location_t loc, tree x)
2152 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2153 us. */
2154 if (in_gimple_form)
2155 return x;
2157 if (! maybe_lvalue_p (x))
2158 return x;
2159 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2162 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2163 Zero means allow extended lvalues. */
2165 int pedantic_lvalues;
2167 /* When pedantic, return an expr equal to X but certainly not valid as a
2168 pedantic lvalue. Otherwise, return X. */
2170 static tree
2171 pedantic_non_lvalue_loc (location_t loc, tree x)
2173 if (pedantic_lvalues)
2174 return non_lvalue_loc (loc, x);
2176 return protected_set_expr_location_unshare (x, loc);
2179 /* Given a tree comparison code, return the code that is the logical inverse.
2180 It is generally not safe to do this for floating-point comparisons, except
2181 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2182 ERROR_MARK in this case. */
2184 enum tree_code
2185 invert_tree_comparison (enum tree_code code, bool honor_nans)
2187 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2188 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2189 return ERROR_MARK;
2191 switch (code)
2193 case EQ_EXPR:
2194 return NE_EXPR;
2195 case NE_EXPR:
2196 return EQ_EXPR;
2197 case GT_EXPR:
2198 return honor_nans ? UNLE_EXPR : LE_EXPR;
2199 case GE_EXPR:
2200 return honor_nans ? UNLT_EXPR : LT_EXPR;
2201 case LT_EXPR:
2202 return honor_nans ? UNGE_EXPR : GE_EXPR;
2203 case LE_EXPR:
2204 return honor_nans ? UNGT_EXPR : GT_EXPR;
2205 case LTGT_EXPR:
2206 return UNEQ_EXPR;
2207 case UNEQ_EXPR:
2208 return LTGT_EXPR;
2209 case UNGT_EXPR:
2210 return LE_EXPR;
2211 case UNGE_EXPR:
2212 return LT_EXPR;
2213 case UNLT_EXPR:
2214 return GE_EXPR;
2215 case UNLE_EXPR:
2216 return GT_EXPR;
2217 case ORDERED_EXPR:
2218 return UNORDERED_EXPR;
2219 case UNORDERED_EXPR:
2220 return ORDERED_EXPR;
2221 default:
2222 gcc_unreachable ();
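/* Worked example (editor's note): with HONOR_NANS set,
   invert_tree_comparison (LT_EXPR, true) returns UNGE_EXPR, because
   !(x < y) must also be true when either operand is a NaN; without NaNs
   the plain GE_EXPR suffices.  Under -ftrapping-math the early return
   above refuses LT_EXPR entirely, since LT may trap on unordered
   operands while UNGE must not.  */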
2226 /* Similar, but return the comparison that results if the operands are
2227 swapped. This is safe for floating-point. */
2229 enum tree_code
2230 swap_tree_comparison (enum tree_code code)
2232 switch (code)
2234 case EQ_EXPR:
2235 case NE_EXPR:
2236 case ORDERED_EXPR:
2237 case UNORDERED_EXPR:
2238 case LTGT_EXPR:
2239 case UNEQ_EXPR:
2240 return code;
2241 case GT_EXPR:
2242 return LT_EXPR;
2243 case GE_EXPR:
2244 return LE_EXPR;
2245 case LT_EXPR:
2246 return GT_EXPR;
2247 case LE_EXPR:
2248 return GE_EXPR;
2249 case UNGT_EXPR:
2250 return UNLT_EXPR;
2251 case UNGE_EXPR:
2252 return UNLE_EXPR;
2253 case UNLT_EXPR:
2254 return UNGT_EXPR;
2255 case UNLE_EXPR:
2256 return UNGE_EXPR;
2257 default:
2258 gcc_unreachable ();
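/* For example (editor's note): swap_tree_comparison (GT_EXPR) yields
   LT_EXPR, reflecting that "a > b" and "b < a" are interchangeable even
   for IEEE values, since swapping the operands never changes whether
   the pair is unordered.  */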
2263 /* Convert a comparison tree code from an enum tree_code representation
2264 into a compcode bit-based encoding. This function is the inverse of
2265 compcode_to_comparison. */
2267 static enum comparison_code
2268 comparison_to_compcode (enum tree_code code)
2270 switch (code)
2272 case LT_EXPR:
2273 return COMPCODE_LT;
2274 case EQ_EXPR:
2275 return COMPCODE_EQ;
2276 case LE_EXPR:
2277 return COMPCODE_LE;
2278 case GT_EXPR:
2279 return COMPCODE_GT;
2280 case NE_EXPR:
2281 return COMPCODE_NE;
2282 case GE_EXPR:
2283 return COMPCODE_GE;
2284 case ORDERED_EXPR:
2285 return COMPCODE_ORD;
2286 case UNORDERED_EXPR:
2287 return COMPCODE_UNORD;
2288 case UNLT_EXPR:
2289 return COMPCODE_UNLT;
2290 case UNEQ_EXPR:
2291 return COMPCODE_UNEQ;
2292 case UNLE_EXPR:
2293 return COMPCODE_UNLE;
2294 case UNGT_EXPR:
2295 return COMPCODE_UNGT;
2296 case LTGT_EXPR:
2297 return COMPCODE_LTGT;
2298 case UNGE_EXPR:
2299 return COMPCODE_UNGE;
2300 default:
2301 gcc_unreachable ();
2305 /* Convert a compcode bit-based encoding of a comparison operator back
2306 to GCC's enum tree_code representation. This function is the
2307 inverse of comparison_to_compcode. */
2309 static enum tree_code
2310 compcode_to_comparison (enum comparison_code code)
2312 switch (code)
2314 case COMPCODE_LT:
2315 return LT_EXPR;
2316 case COMPCODE_EQ:
2317 return EQ_EXPR;
2318 case COMPCODE_LE:
2319 return LE_EXPR;
2320 case COMPCODE_GT:
2321 return GT_EXPR;
2322 case COMPCODE_NE:
2323 return NE_EXPR;
2324 case COMPCODE_GE:
2325 return GE_EXPR;
2326 case COMPCODE_ORD:
2327 return ORDERED_EXPR;
2328 case COMPCODE_UNORD:
2329 return UNORDERED_EXPR;
2330 case COMPCODE_UNLT:
2331 return UNLT_EXPR;
2332 case COMPCODE_UNEQ:
2333 return UNEQ_EXPR;
2334 case COMPCODE_UNLE:
2335 return UNLE_EXPR;
2336 case COMPCODE_UNGT:
2337 return UNGT_EXPR;
2338 case COMPCODE_LTGT:
2339 return LTGT_EXPR;
2340 case COMPCODE_UNGE:
2341 return UNGE_EXPR;
2342 default:
2343 gcc_unreachable ();
2347 /* Return a tree for the comparison which is the combination of
2348 doing the AND or OR (depending on CODE) of the two operations LCODE
2349 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2350 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2351 if this makes the transformation invalid. */
2353 tree
2354 combine_comparisons (location_t loc,
2355 enum tree_code code, enum tree_code lcode,
2356 enum tree_code rcode, tree truth_type,
2357 tree ll_arg, tree lr_arg)
2359 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2360 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2361 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2362 int compcode;
2364 switch (code)
2366 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2367 compcode = lcompcode & rcompcode;
2368 break;
2370 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2371 compcode = lcompcode | rcompcode;
2372 break;
2374 default:
2375 return NULL_TREE;
2378 if (!honor_nans)
2380 /* Eliminate unordered comparisons, as well as LTGT and ORD
2381 which are not used unless the mode has NaNs. */
2382 compcode &= ~COMPCODE_UNORD;
2383 if (compcode == COMPCODE_LTGT)
2384 compcode = COMPCODE_NE;
2385 else if (compcode == COMPCODE_ORD)
2386 compcode = COMPCODE_TRUE;
2388 else if (flag_trapping_math)
2390 /* Check that the original operation and the optimized ones will trap
2391 under the same condition. */
2392 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2393 && (lcompcode != COMPCODE_EQ)
2394 && (lcompcode != COMPCODE_ORD);
2395 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2396 && (rcompcode != COMPCODE_EQ)
2397 && (rcompcode != COMPCODE_ORD);
2398 bool trap = (compcode & COMPCODE_UNORD) == 0
2399 && (compcode != COMPCODE_EQ)
2400 && (compcode != COMPCODE_ORD);
2402 /* In a short-circuited boolean expression the LHS might be
2403 such that the RHS, if evaluated, will never trap. For
2404 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2405 if neither x nor y is NaN. (This is a mixed blessing: for
2406 example, the expression above will never trap, hence
2407 optimizing it to x < y would be invalid). */
2408 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2409 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2410 rtrap = false;
2412 /* If the comparison was short-circuited, and only the RHS
2413 trapped, we may now generate a spurious trap. */
2414 if (rtrap && !ltrap
2415 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2416 return NULL_TREE;
2418 /* If we changed the conditions that cause a trap, we lose. */
2419 if ((ltrap || rtrap) != trap)
2420 return NULL_TREE;
2423 if (compcode == COMPCODE_TRUE)
2424 return constant_boolean_node (true, truth_type);
2425 else if (compcode == COMPCODE_FALSE)
2426 return constant_boolean_node (false, truth_type);
2427 else
2429 enum tree_code tcode;
2431 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2432 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
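/* Worked example (editor's sketch): combining (a < b) || (a == b) gives
   lcompcode == COMPCODE_LT and rcompcode == COMPCODE_EQ, whose bit-wise
   OR is COMPCODE_LE, so (assuming the NaN and trapping checks above
   pass) the result is fold_build2_loc (loc, LE_EXPR, truth_type, a, b).
   Conversely, (a < b) && (a == b) ANDs to COMPCODE_FALSE and folds to a
   constant false node.  */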
2436 /* Return nonzero if two operands (typically of the same tree node)
2437 are necessarily equal. If either argument has side-effects this
2438 function returns zero. FLAGS modifies behavior as follows:
2440 If OEP_ONLY_CONST is set, only return nonzero for constants.
2441 This function tests whether the operands are indistinguishable;
2442 it does not test whether they are equal using C's == operation.
2443 The distinction is important for IEEE floating point, because
2444 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2445 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2447 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2448 even though it may hold multiple values during a function.
2449 This is because a GCC tree node guarantees that nothing else is
2450 executed between the evaluation of its "operands" (which may often
2451 be evaluated in arbitrary order). Hence if the operands themselves
2452 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2453 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2454 unset means assuming isochronic (or instantaneous) tree equivalence.
2455 Unless comparing arbitrary expression trees, such as from different
2456 statements, this flag can usually be left unset.
2458 If OEP_PURE_SAME is set, then pure functions with identical arguments
2459 are considered the same. It is used when the caller has other ways
2460 to ensure that global memory is unchanged in between. */
2462 int
2463 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2465 /* If either is ERROR_MARK, they aren't equal. */
2466 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2467 || TREE_TYPE (arg0) == error_mark_node
2468 || TREE_TYPE (arg1) == error_mark_node)
2469 return 0;
2471 /* Similar, if either does not have a type (like a released SSA name),
2472 they aren't equal. */
2473 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2474 return 0;
2476 /* Check equality of integer constants before bailing out due to
2477 precision differences. */
2478 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2479 return tree_int_cst_equal (arg0, arg1);
2481 /* If the two types don't have the same signedness, we can't consider
2482 them equal. We must check this before the STRIP_NOPS calls
2483 because they may change the signedness of the arguments. As pointers
2484 strictly don't have a signedness, require either two pointers or
2485 two non-pointers as well. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2487 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2488 return 0;
2490 /* We cannot consider pointers to different address spaces equal. */
2491 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2492 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2493 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2494 return 0;
2496 /* If the two types don't have the same precision, it is not safe
2497 to strip NOPs. */
2498 if (element_precision (TREE_TYPE (arg0))
2499 != element_precision (TREE_TYPE (arg1)))
2500 return 0;
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2523 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2524 return 0;
2526 /* This is needed for conversions and for COMPONENT_REF.
2527 Might as well play it safe and always test this. */
2528 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2529 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2530 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2531 return 0;
2533 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2534 We don't care about side effects in that case because the SAVE_EXPR
2535 takes care of that for us. In all other cases, two expressions are
2536 equal if they have no side effects. If we have two identical
2537 expressions with side effects that should be treated the same due
2538 to the only side effects being identical SAVE_EXPR's, that will
2539 be detected in the recursive calls below.
2540 If we are taking an invariant address of two identical objects
2541 they are necessarily equal as well. */
2542 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2543 && (TREE_CODE (arg0) == SAVE_EXPR
2544 || (flags & OEP_CONSTANT_ADDRESS_OF)
2545 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2546 return 1;
2548 /* Next handle constant cases, those for which we can return 1 even
2549 if ONLY_CONST is set. */
2550 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2551 switch (TREE_CODE (arg0))
2553 case INTEGER_CST:
2554 return tree_int_cst_equal (arg0, arg1);
2556 case FIXED_CST:
2557 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2558 TREE_FIXED_CST (arg1));
2560 case REAL_CST:
2561 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2562 TREE_REAL_CST (arg1)))
2563 return 1;
2566 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2568 /* If we do not distinguish between signed and unsigned zero,
2569 consider them equal. */
2570 if (real_zerop (arg0) && real_zerop (arg1))
2571 return 1;
2573 return 0;
2575 case VECTOR_CST:
2577 unsigned i;
2579 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2580 return 0;
2582 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2584 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2585 VECTOR_CST_ELT (arg1, i), flags))
2586 return 0;
2588 return 1;
2591 case COMPLEX_CST:
2592 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2593 flags)
2594 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2595 flags));
2597 case STRING_CST:
2598 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2599 && ! memcmp (TREE_STRING_POINTER (arg0),
2600 TREE_STRING_POINTER (arg1),
2601 TREE_STRING_LENGTH (arg0)));
2603 case ADDR_EXPR:
2604 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2605 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2606 ? OEP_CONSTANT_ADDRESS_OF : 0);
2607 default:
2608 break;
2611 if (flags & OEP_ONLY_CONST)
2612 return 0;
2614 /* Define macros to test an operand from arg0 and arg1 for equality and a
2615 variant that allows null and views null as being different from any
2616 non-null value. In the latter case, if either is null, then both
2617 must be; otherwise, do the normal comparison. */
2618 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2619 TREE_OPERAND (arg1, N), flags)
2621 #define OP_SAME_WITH_NULL(N) \
2622 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2623 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2625 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2627 case tcc_unary:
2628 /* Two conversions are equal only if signedness and modes match. */
2629 switch (TREE_CODE (arg0))
2631 CASE_CONVERT:
2632 case FIX_TRUNC_EXPR:
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2634 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636 break;
2637 default:
2638 break;
2641 return OP_SAME (0);
2644 case tcc_comparison:
2645 case tcc_binary:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2649 /* For commutative ops, allow the other order. */
2650 return (commutative_tree_code (TREE_CODE (arg0))
2651 && operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2656 case tcc_reference:
2657 /* If either of the pointer (or reference) expressions we are
2658 dereferencing contain a side effect, these cannot be equal,
2659 but their addresses can be. */
2660 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2661 && (TREE_SIDE_EFFECTS (arg0)
2662 || TREE_SIDE_EFFECTS (arg1)))
2663 return 0;
2665 switch (TREE_CODE (arg0))
2667 case INDIRECT_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 return OP_SAME (0);
2671 case REALPART_EXPR:
2672 case IMAGPART_EXPR:
2673 return OP_SAME (0);
2675 case TARGET_MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal extra operands and then fall through to MEM_REF
2678 handling of the two common operands. */
2679 if (!OP_SAME_WITH_NULL (2)
2680 || !OP_SAME_WITH_NULL (3)
2681 || !OP_SAME_WITH_NULL (4))
2682 return 0;
2683 /* Fallthru. */
2684 case MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal access sizes, and similar pointer types.
2687 We can have incomplete types for array references of
2688 variable-sized arrays from the Fortran frontend
2689 though. Also verify the types are compatible. */
2690 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2691 || (TYPE_SIZE (TREE_TYPE (arg0))
2692 && TYPE_SIZE (TREE_TYPE (arg1))
2693 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2694 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2695 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2696 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2697 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2698 && OP_SAME (0) && OP_SAME (1));
2700 case ARRAY_REF:
2701 case ARRAY_RANGE_REF:
2702 /* Operands 2 and 3 may be null.
2703 Compare the array index by value first if it is constant, as we
2704 may have different types but the same value here. */
2705 if (!OP_SAME (0))
2706 return 0;
2707 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2708 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2709 TREE_OPERAND (arg1, 1))
2710 || OP_SAME (1))
2711 && OP_SAME_WITH_NULL (2)
2712 && OP_SAME_WITH_NULL (3));
2714 case COMPONENT_REF:
2715 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2716 may be NULL when we're called to compare MEM_EXPRs. */
2717 if (!OP_SAME_WITH_NULL (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2722 case BIT_FIELD_REF:
2723 if (!OP_SAME (0))
2724 return 0;
2725 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2726 return OP_SAME (1) && OP_SAME (2);
2728 default:
2729 return 0;
2732 case tcc_expression:
2733 switch (TREE_CODE (arg0))
2735 case ADDR_EXPR:
2736 case TRUTH_NOT_EXPR:
2737 return OP_SAME (0);
2739 case TRUTH_ANDIF_EXPR:
2740 case TRUTH_ORIF_EXPR:
2741 return OP_SAME (0) && OP_SAME (1);
2743 case FMA_EXPR:
2744 case WIDEN_MULT_PLUS_EXPR:
2745 case WIDEN_MULT_MINUS_EXPR:
2746 if (!OP_SAME (2))
2747 return 0;
2748 /* The multiplication operands are commutative. */
2749 /* FALLTHRU */
2751 case TRUTH_AND_EXPR:
2752 case TRUTH_OR_EXPR:
2753 case TRUTH_XOR_EXPR:
2754 if (OP_SAME (0) && OP_SAME (1))
2755 return 1;
2757 /* Otherwise take into account this is a commutative operation. */
2758 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2759 TREE_OPERAND (arg1, 1), flags)
2760 && operand_equal_p (TREE_OPERAND (arg0, 1),
2761 TREE_OPERAND (arg1, 0), flags));
2763 case COND_EXPR:
2764 case VEC_COND_EXPR:
2765 case DOT_PROD_EXPR:
2766 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2768 default:
2769 return 0;
2772 case tcc_vl_exp:
2773 switch (TREE_CODE (arg0))
2775 case CALL_EXPR:
2776 /* If the CALL_EXPRs call different functions, then they
2777 clearly cannot be equal. */
2778 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2779 flags))
2780 return 0;
2783 unsigned int cef = call_expr_flags (arg0);
2784 if (flags & OEP_PURE_SAME)
2785 cef &= ECF_CONST | ECF_PURE;
2786 else
2787 cef &= ECF_CONST;
2788 if (!cef)
2789 return 0;
2792 /* Now see if all the arguments are the same. */
2794 const_call_expr_arg_iterator iter0, iter1;
2795 const_tree a0, a1;
2796 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2797 a1 = first_const_call_expr_arg (arg1, &iter1);
2798 a0 && a1;
2799 a0 = next_const_call_expr_arg (&iter0),
2800 a1 = next_const_call_expr_arg (&iter1))
2801 if (! operand_equal_p (a0, a1, flags))
2802 return 0;
2804 /* If we get here and both argument lists are exhausted
2805 then the CALL_EXPRs are equal. */
2806 return ! (a0 || a1);
2808 default:
2809 return 0;
2812 case tcc_declaration:
2813 /* Consider __builtin_sqrt equal to sqrt. */
2814 return (TREE_CODE (arg0) == FUNCTION_DECL
2815 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2816 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2817 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2819 default:
2820 return 0;
2823 #undef OP_SAME
2824 #undef OP_SAME_WITH_NULL
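/* Usage sketch (editor's illustration): this predicate implements
   structural, not semantic, equality, e.g.

     if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
       ...

   treats "a + b" and "b + a" as equal through the commutative case
   above, while REAL_CSTs 0.0 and -0.0 compare unequal whenever the
   mode honors signed zeros.  */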
2827 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2828 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2830 When in doubt, return 0. */
2832 static int
2833 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2835 int unsignedp1, unsignedpo;
2836 tree primarg0, primarg1, primother;
2837 unsigned int correct_width;
2839 if (operand_equal_p (arg0, arg1, 0))
2840 return 1;
2842 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2843 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2844 return 0;
2846 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2847 and see if the inner values are the same. This removes any
2848 signedness comparison, which doesn't matter here. */
2849 primarg0 = arg0, primarg1 = arg1;
2850 STRIP_NOPS (primarg0);
2851 STRIP_NOPS (primarg1);
2852 if (operand_equal_p (primarg0, primarg1, 0))
2853 return 1;
2855 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2856 actual comparison operand, ARG0.
2858 First throw away any conversions to wider types
2859 already present in the operands. */
2861 primarg1 = get_narrower (arg1, &unsignedp1);
2862 primother = get_narrower (other, &unsignedpo);
2864 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2865 if (unsignedp1 == unsignedpo
2866 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2867 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2869 tree type = TREE_TYPE (arg0);
2871 /* Make sure shorter operand is extended the right way
2872 to match the longer operand. */
2873 primarg1 = fold_convert (signed_or_unsigned_type_for
2874 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2876 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2877 return 1;
2880 return 0;
2883 /* See if ARG is an expression that is either a comparison or is performing
2884 arithmetic on comparisons. The comparisons must only be comparing
2885 two different values, which will be stored in *CVAL1 and *CVAL2; if
2886 they are nonzero it means that some operands have already been found.
2887 No variables may be used anywhere else in the expression except in the
2888 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2889 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2891 If this is true, return 1. Otherwise, return zero. */
2893 static int
2894 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2896 enum tree_code code = TREE_CODE (arg);
2897 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2899 /* We can handle some of the tcc_expression cases here. */
2900 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2901 tclass = tcc_unary;
2902 else if (tclass == tcc_expression
2903 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2904 || code == COMPOUND_EXPR))
2905 tclass = tcc_binary;
2907 else if (tclass == tcc_expression && code == SAVE_EXPR
2908 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2910 /* If we've already found a CVAL1 or CVAL2, this expression is
2911 too complex to handle. */
2912 if (*cval1 || *cval2)
2913 return 0;
2915 tclass = tcc_unary;
2916 *save_p = 1;
2919 switch (tclass)
2921 case tcc_unary:
2922 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2924 case tcc_binary:
2925 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2926 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2927 cval1, cval2, save_p));
2929 case tcc_constant:
2930 return 1;
2932 case tcc_expression:
2933 if (code == COND_EXPR)
2934 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2935 cval1, cval2, save_p)
2936 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2937 cval1, cval2, save_p)
2938 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2939 cval1, cval2, save_p));
2940 return 0;
2942 case tcc_comparison:
2943 /* First see if we can handle the first operand, then the second. For
2944 the second operand, we know *CVAL1 can't be zero. It must be that
2945 one side of the comparison is each of the values; test for the
2946 case where this isn't true by failing if the two operands
2947 are the same. */
2949 if (operand_equal_p (TREE_OPERAND (arg, 0),
2950 TREE_OPERAND (arg, 1), 0))
2951 return 0;
2953 if (*cval1 == 0)
2954 *cval1 = TREE_OPERAND (arg, 0);
2955 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2957 else if (*cval2 == 0)
2958 *cval2 = TREE_OPERAND (arg, 0);
2959 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2961 else
2962 return 0;
2964 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2966 else if (*cval2 == 0)
2967 *cval2 = TREE_OPERAND (arg, 1);
2968 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2970 else
2971 return 0;
2973 return 1;
2975 default:
2976 return 0;
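/* Example (editor's note): for ARG == (x < y) || (x == y) the walk
   records x in *CVAL1 and y in *CVAL2 and returns 1; introducing a
   third variable, as in (x < y) || (x == z), fails the equality tests
   above and the whole walk returns 0.  */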
2980 /* ARG is a tree that is known to contain just arithmetic operations and
2981 comparisons. Evaluate the operations in the tree substituting NEW0 for
2982 any occurrence of OLD0 as an operand of a comparison and likewise for
2983 NEW1 and OLD1. */
2985 static tree
2986 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2987 tree old1, tree new1)
2989 tree type = TREE_TYPE (arg);
2990 enum tree_code code = TREE_CODE (arg);
2991 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2993 /* We can handle some of the tcc_expression cases here. */
2994 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2995 tclass = tcc_unary;
2996 else if (tclass == tcc_expression
2997 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2998 tclass = tcc_binary;
3000 switch (tclass)
3002 case tcc_unary:
3003 return fold_build1_loc (loc, code, type,
3004 eval_subst (loc, TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1));
3007 case tcc_binary:
3008 return fold_build2_loc (loc, code, type,
3009 eval_subst (loc, TREE_OPERAND (arg, 0),
3010 old0, new0, old1, new1),
3011 eval_subst (loc, TREE_OPERAND (arg, 1),
3012 old0, new0, old1, new1));
3014 case tcc_expression:
3015 switch (code)
3017 case SAVE_EXPR:
3018 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3019 old1, new1);
3021 case COMPOUND_EXPR:
3022 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3023 old1, new1);
3025 case COND_EXPR:
3026 return fold_build3_loc (loc, code, type,
3027 eval_subst (loc, TREE_OPERAND (arg, 0),
3028 old0, new0, old1, new1),
3029 eval_subst (loc, TREE_OPERAND (arg, 1),
3030 old0, new0, old1, new1),
3031 eval_subst (loc, TREE_OPERAND (arg, 2),
3032 old0, new0, old1, new1));
3033 default:
3034 break;
3036 /* Fall through - ??? */
3038 case tcc_comparison:
3040 tree arg0 = TREE_OPERAND (arg, 0);
3041 tree arg1 = TREE_OPERAND (arg, 1);
3043 /* We need to check both for exact equality and tree equality. The
3044 former will be true if the operand has a side-effect. In that
3045 case, we know the operand occurred exactly once. */
3047 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3048 arg0 = new0;
3049 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3050 arg0 = new1;
3052 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3053 arg1 = new0;
3054 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3055 arg1 = new1;
3057 return fold_build2_loc (loc, code, type, arg0, arg1);
3060 default:
3061 return arg;
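/* Example (editor's note): eval_subst (loc, (x < y) && (y < z), x, a, y, b)
   rebuilds the tree as (a < b) && (b < z), modulo further folding:
   every comparison operand equal to OLD0 or OLD1 is replaced by the
   corresponding NEW operand, and z is left untouched.  */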
3065 /* Return a tree for the case when the result of an expression is RESULT
3066 converted to TYPE and OMITTED was previously an operand of the expression
3067 but is now not needed (e.g., we folded OMITTED * 0).
3069 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3070 the conversion of RESULT to TYPE. */
3072 tree
3073 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3075 tree t = fold_convert_loc (loc, type, result);
3077 /* If the resulting operand is an empty statement, just return the omitted
3078 statement cast to void. */
3079 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3080 return build1_loc (loc, NOP_EXPR, void_type_node,
3081 fold_ignored_result (omitted));
3083 if (TREE_SIDE_EFFECTS (omitted))
3084 return build2_loc (loc, COMPOUND_EXPR, type,
3085 fold_ignored_result (omitted), t);
3087 return non_lvalue_loc (loc, t);
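/* Example (editor's note): when folding "f () * 0" the product is known
   to be 0, but the call must still be evaluated, so
   omit_one_operand_loc (loc, type, integer_zero_node, call) yields the
   COMPOUND_EXPR "f (), 0"; for a side-effect-free OMITTED the converted
   RESULT is returned directly, wrapped as a non-lvalue.  */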
3090 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3092 static tree
3093 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3094 tree omitted)
3096 tree t = fold_convert_loc (loc, type, result);
3098 /* If the resulting operand is an empty statement, just return the omitted
3099 statement cast to void. */
3100 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3101 return build1_loc (loc, NOP_EXPR, void_type_node,
3102 fold_ignored_result (omitted));
3104 if (TREE_SIDE_EFFECTS (omitted))
3105 return build2_loc (loc, COMPOUND_EXPR, type,
3106 fold_ignored_result (omitted), t);
3108 return pedantic_non_lvalue_loc (loc, t);
3111 /* Return a tree for the case when the result of an expression is RESULT
3112 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3113 of the expression but are now not needed.
3115 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3116 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3117 evaluated before OMITTED2. Otherwise, if neither has side effects,
3118 just do the conversion of RESULT to TYPE. */
3120 tree
3121 omit_two_operands_loc (location_t loc, tree type, tree result,
3122 tree omitted1, tree omitted2)
3124 tree t = fold_convert_loc (loc, type, result);
3126 if (TREE_SIDE_EFFECTS (omitted2))
3127 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3128 if (TREE_SIDE_EFFECTS (omitted1))
3129 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3131 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3135 /* Return a simplified tree node for the truth-negation of ARG. This
3136 never alters ARG itself. We assume that ARG is an operation that
3137 returns a truth value (0 or 1).
3139 FIXME: one would think we would fold the result, but it causes
3140 problems with the dominator optimizer. */
3142 static tree
3143 fold_truth_not_expr (location_t loc, tree arg)
3145 tree type = TREE_TYPE (arg);
3146 enum tree_code code = TREE_CODE (arg);
3147 location_t loc1, loc2;
3149 /* If this is a comparison, we can simply invert it, except for
3150 floating-point non-equality comparisons, in which case we just
3151 enclose a TRUTH_NOT_EXPR around what we have. */
3153 if (TREE_CODE_CLASS (code) == tcc_comparison)
3155 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3156 if (FLOAT_TYPE_P (op_type)
3157 && flag_trapping_math
3158 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3159 && code != NE_EXPR && code != EQ_EXPR)
3160 return NULL_TREE;
3162 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3163 if (code == ERROR_MARK)
3164 return NULL_TREE;
3166 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3167 TREE_OPERAND (arg, 1));
3170 switch (code)
3172 case INTEGER_CST:
3173 return constant_boolean_node (integer_zerop (arg), type);
3175 case TRUTH_AND_EXPR:
3176 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3177 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, TRUTH_OR_EXPR, type,
3179 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3180 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3182 case TRUTH_OR_EXPR:
3183 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3184 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3185 return build2_loc (loc, TRUTH_AND_EXPR, type,
3186 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3187 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3189 case TRUTH_XOR_EXPR:
3190 /* Here we can invert either operand. We invert the first operand
3191 unless the second operand is a TRUTH_NOT_EXPR in which case our
3192 result is the XOR of the first operand with the inside of the
3193 negation of the second operand. */
3195 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3196 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3197 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3198 else
3199 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3200 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3201 TREE_OPERAND (arg, 1));
3203 case TRUTH_ANDIF_EXPR:
3204 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3205 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3206 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3208 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3210 case TRUTH_ORIF_EXPR:
3211 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3212 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3213 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3214 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3215 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3217 case TRUTH_NOT_EXPR:
3218 return TREE_OPERAND (arg, 0);
3220 case COND_EXPR:
3222 tree arg1 = TREE_OPERAND (arg, 1);
3223 tree arg2 = TREE_OPERAND (arg, 2);
3225 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3226 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3228 /* A COND_EXPR may have a throw as one operand, which
3229 then has void type. Just leave void operands
3230 as they are. */
3231 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3232 VOID_TYPE_P (TREE_TYPE (arg1))
3233 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3234 VOID_TYPE_P (TREE_TYPE (arg2))
3235 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3238 case COMPOUND_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3240 return build2_loc (loc, COMPOUND_EXPR, type,
3241 TREE_OPERAND (arg, 0),
3242 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3244 case NON_LVALUE_EXPR:
3245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3246 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3248 CASE_CONVERT:
3249 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3250 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3252 /* ... fall through ... */
3254 case FLOAT_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return build1_loc (loc, TREE_CODE (arg), type,
3257 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3259 case BIT_AND_EXPR:
3260 if (!integer_onep (TREE_OPERAND (arg, 1)))
3261 return NULL_TREE;
3262 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3264 case SAVE_EXPR:
3265 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3267 case CLEANUP_POINT_EXPR:
3268 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3269 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3270 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3272 default:
3273 return NULL_TREE;
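/* Example (editor's note): for ARG == (a && b) the TRUTH_ANDIF_EXPR case
   above applies De Morgan's law and returns (!a || !b), negating each
   operand through invert_truthvalue_loc; an unhandled code such as a
   plain variable yields NULL_TREE, and callers fall back to an explicit
   TRUTH_NOT_EXPR.  */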
3277 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3278 assume that ARG is an operation that returns a truth value (0 or 1
3279 for scalars, 0 or -1 for vectors). Return the folded expression if
3280 folding is successful. Otherwise, return NULL_TREE. */
3282 static tree
3283 fold_invert_truthvalue (location_t loc, tree arg)
3285 tree type = TREE_TYPE (arg);
3286 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3292 /* Return a simplified tree node for the truth-negation of ARG. This
3293 never alters ARG itself. We assume that ARG is an operation that
3294 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3296 tree
3297 invert_truthvalue_loc (location_t loc, tree arg)
3299 if (TREE_CODE (arg) == ERROR_MARK)
3300 return arg;
3302 tree type = TREE_TYPE (arg);
3303 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3304 ? BIT_NOT_EXPR
3305 : TRUTH_NOT_EXPR,
3306 type, arg);
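/* Usage note (editor's addition): unlike fold_invert_truthvalue, this
   entry point always succeeds, e.g. invert_truthvalue_loc (loc, a_lt_b)
   produces either the inverted comparison via the folder or a wrapping
   TRUTH_NOT_EXPR; vector operands get BIT_NOT_EXPR because vector truth
   values are 0 / -1 rather than 0 / 1.  */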
3309 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3310 operands are another bit-wise operation with a common input. If so,
3311 distribute the bit operations to save an operation and possibly two if
3312 constants are involved. For example, convert
3313 (A | B) & (A | C) into A | (B & C)
3314 Further simplification will occur if B and C are constants.
3316 If this optimization cannot be done, 0 will be returned. */
3318 static tree
3319 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3320 tree arg0, tree arg1)
3322 tree common;
3323 tree left, right;
3325 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3326 || TREE_CODE (arg0) == code
3327 || (TREE_CODE (arg0) != BIT_AND_EXPR
3328 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3329 return 0;
3331 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 1);
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3339 common = TREE_OPERAND (arg0, 0);
3340 left = TREE_OPERAND (arg0, 1);
3341 right = TREE_OPERAND (arg1, 0);
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 1);
3349 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3351 common = TREE_OPERAND (arg0, 1);
3352 left = TREE_OPERAND (arg0, 0);
3353 right = TREE_OPERAND (arg1, 0);
3355 else
3356 return 0;
3358 common = fold_convert_loc (loc, type, common);
3359 left = fold_convert_loc (loc, type, left);
3360 right = fold_convert_loc (loc, type, right);
3361 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3362 fold_build2_loc (loc, code, type, left, right));
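/* Worked example (editor's sketch): for (x | 4) & (x | 1) the common
   operand is x, LEFT is 4 and RIGHT is 1, so the rebuilt tree is
   x | (4 & 1); constant folding of the inner BIT_AND_EXPR reduces this
   to x | 0 and ultimately to plain x.  */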
3365 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3366 with code CODE. This optimization is unsafe. */
3367 static tree
3368 distribute_real_division (location_t loc, enum tree_code code, tree type,
3369 tree arg0, tree arg1)
3371 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3372 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3374 /* (A / C) +- (B / C) -> (A +- B) / C. */
3375 if (mul0 == mul1
3376 && operand_equal_p (TREE_OPERAND (arg0, 1),
3377 TREE_OPERAND (arg1, 1), 0))
3378 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3379 fold_build2_loc (loc, code, type,
3380 TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 0)),
3382 TREE_OPERAND (arg0, 1));
3384 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3385 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3386 TREE_OPERAND (arg1, 0), 0)
3387 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3388 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3390 REAL_VALUE_TYPE r0, r1;
3391 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3392 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3393 if (!mul0)
3394 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3395 if (!mul1)
3396 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3397 real_arithmetic (&r0, code, &r0, &r1);
3398 return fold_build2_loc (loc, MULT_EXPR, type,
3399 TREE_OPERAND (arg0, 0),
3400 build_real (type, r0));
3403 return NULL_TREE;
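/* Worked example (editor's note; callers are expected to gate this on
   unsafe-math flags, as the comment above warns): for
   (a / 2.0) + (a / 4.0) the second pattern fires with C1 == 2.0 and
   C2 == 4.0, computing 1/2.0 + 1/4.0 == 0.75 at compile time and
   returning a * 0.75, trading two divisions and an addition for a
   single multiplication.  */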
3406 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3407 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3409 static tree
3410 make_bit_field_ref (location_t loc, tree inner, tree type,
3411 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3413 tree result, bftype;
3415 if (bitpos == 0)
3417 tree size = TYPE_SIZE (TREE_TYPE (inner));
3418 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3419 || POINTER_TYPE_P (TREE_TYPE (inner)))
3420 && host_integerp (size, 0)
3421 && tree_low_cst (size, 0) == bitsize)
3422 return fold_convert_loc (loc, type, inner);
3425 bftype = type;
3426 if (TYPE_PRECISION (bftype) != bitsize
3427 || TYPE_UNSIGNED (bftype) == !unsignedp)
3428 bftype = build_nonstandard_integer_type (bitsize, 0);
3430 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3431 size_int (bitsize), bitsize_int (bitpos));
3433 if (bftype != type)
3434 result = fold_convert_loc (loc, type, result);
3436 return result;
3439 /* Optimize a bit-field compare.
3441 There are two cases: First is a compare against a constant and the
3442 second is a comparison of two items where the fields are at the same
3443 bit position relative to the start of a chunk (byte, halfword, word)
3444 large enough to contain it. In these cases we can avoid the shift
3445 implicit in bitfield extractions.
3447 For constants, we emit a compare of the shifted constant with the
3448 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3449 compared. For two fields at the same position, we do the ANDs with a
3450 similar mask and compare the result of the ANDs.
3452 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3453 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3454 are the left and right operands of the comparison, respectively.
3456 If the optimization described above can be done, we return the resulting
3457 tree. Otherwise we return zero. */
3459 static tree
3460 optimize_bit_field_compare (location_t loc, enum tree_code code,
3461 tree compare_type, tree lhs, tree rhs)
3463 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3464 tree type = TREE_TYPE (lhs);
3465 tree signed_type, unsigned_type;
3466 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3467 enum machine_mode lmode, rmode, nmode;
3468 int lunsignedp, runsignedp;
3469 int lvolatilep = 0, rvolatilep = 0;
3470 tree linner, rinner = NULL_TREE;
3471 tree mask;
3472 tree offset;
3474 /* In the strict volatile bitfields case, doing code changes here may prevent
3475 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3476 if (flag_strict_volatile_bitfields > 0)
3477 return 0;
3479 /* Get all the information about the extractions being done. If the bit size
3480 is the same as the size of the underlying object, we aren't doing an
3481 extraction at all and so can do nothing. We also don't want to
3482 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3483 then will no longer be able to replace it. */
3484 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3485 &lunsignedp, &lvolatilep, false);
3486 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3487 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3488 return 0;
3490 if (!const_p)
3492 /* If this is not a constant, we can only do something if bit positions,
3493 sizes, and signedness are the same. */
3494 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3495 &runsignedp, &rvolatilep, false);
3497 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3498 || lunsignedp != runsignedp || offset != 0
3499 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3500 return 0;
3503 /* See if we can find a mode to refer to this field. We should be able to,
3504 but fail if we can't. */
3505 if (lvolatilep
3506 && GET_MODE_BITSIZE (lmode) > 0
3507 && flag_strict_volatile_bitfields > 0)
3508 nmode = lmode;
3509 else
3510 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3511 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3512 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3513 TYPE_ALIGN (TREE_TYPE (rinner))),
3514 word_mode, lvolatilep || rvolatilep);
3515 if (nmode == VOIDmode)
3516 return 0;
3518 /* Set signed and unsigned types of the precision of this mode for the
3519 shifts below. */
3520 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3521 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3523 /* Compute the bit position and size for the new reference and our offset
3524 within it. If the new reference is the same size as the original, we
3525 won't optimize anything, so return zero. */
3526 nbitsize = GET_MODE_BITSIZE (nmode);
3527 nbitpos = lbitpos & ~ (nbitsize - 1);
3528 lbitpos -= nbitpos;
3529 if (nbitsize == lbitsize)
3530 return 0;
3532 if (BYTES_BIG_ENDIAN)
3533 lbitpos = nbitsize - lbitsize - lbitpos;
3535 /* Make the mask to be used against the extracted field. */
3536 mask = build_int_cst_type (unsigned_type, -1);
3537 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3538 mask = const_binop (RSHIFT_EXPR, mask,
3539 size_int (nbitsize - lbitsize - lbitpos));
3541 if (! const_p)
3542 /* If not comparing with constant, just rework the comparison
3543 and return. */
3544 return fold_build2_loc (loc, code, compare_type,
3545 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3546 make_bit_field_ref (loc, linner,
3547 unsigned_type,
3548 nbitsize, nbitpos,
3549 1),
3550 mask),
3551 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3552 make_bit_field_ref (loc, rinner,
3553 unsigned_type,
3554 nbitsize, nbitpos,
3555 1),
3556 mask));
3558 /* Otherwise, we are handling the constant case. See if the constant is too
3559 big for the field. Warn and return a tree for 0 (false) if so. We do
3560 this not only for its own sake, but to avoid having to test for this
3561 error case below. If we didn't, we might generate wrong code.
3563 For unsigned fields, the constant shifted right by the field length should
3564 be all zero. For signed fields, the high-order bits should agree with
3565 the sign bit. */
3567 if (lunsignedp)
3569 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3570 fold_convert_loc (loc,
3571 unsigned_type, rhs),
3572 size_int (lbitsize))))
3574 warning (0, "comparison is always %d due to width of bit-field",
3575 code == NE_EXPR);
3576 return constant_boolean_node (code == NE_EXPR, compare_type);
3579 else
3581 tree tem = const_binop (RSHIFT_EXPR,
3582 fold_convert_loc (loc, signed_type, rhs),
3583 size_int (lbitsize - 1));
3584 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3586 warning (0, "comparison is always %d due to width of bit-field",
3587 code == NE_EXPR);
3588 return constant_boolean_node (code == NE_EXPR, compare_type);
3592 /* Single-bit compares should always be against zero. */
3593 if (lbitsize == 1 && ! integer_zerop (rhs))
3595 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3596 rhs = build_int_cst (type, 0);
3599 /* Make a new bitfield reference, shift the constant over the
3600 appropriate number of bits and mask it with the computed mask
3601 (in case this was a signed field). If we changed it, make a new one. */
3602 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3603 if (lvolatilep)
3605 TREE_SIDE_EFFECTS (lhs) = 1;
3606 TREE_THIS_VOLATILE (lhs) = 1;
3609 rhs = const_binop (BIT_AND_EXPR,
3610 const_binop (LSHIFT_EXPR,
3611 fold_convert_loc (loc, unsigned_type, rhs),
3612 size_int (lbitpos)),
3613 mask);
3615 lhs = build2_loc (loc, code, compare_type,
3616 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3617 return lhs;
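/* Worked example (editor's sketch; the exact mode chosen depends on
   get_best_mode): comparing a 4-bit bit-field B at bit position 8 of a
   32-bit word against the constant 3, the constant path loads the
   containing word W and emits (W & 0xf00) == 0x300 on a little-endian
   target, avoiding the shift a plain bit-field extraction would need.  */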
3620 /* Subroutine for fold_truth_andor_1: decode a field reference.
3622 If EXP is a comparison reference, we return the innermost reference.
3624 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3625 set to the starting bit number.
3627 If the innermost field can be completely contained in a mode-sized
3628 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3630 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3631 otherwise it is not changed.
3633 *PUNSIGNEDP is set to the signedness of the field.
3635 *PMASK is set to the mask used. This is either contained in a
3636 BIT_AND_EXPR or derived from the width of the field.
3638 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3640 Return 0 if this is not a component reference or is one that we can't
3641 do anything with. */
3643 static tree
3644 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3645 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3646 int *punsignedp, int *pvolatilep,
3647 tree *pmask, tree *pand_mask)
3649 tree outer_type = 0;
3650 tree and_mask = 0;
3651 tree mask, inner, offset;
3652 tree unsigned_type;
3653 unsigned int precision;
3655 /* All the optimizations using this function assume integer fields.
3656 There are problems with FP fields since the type_for_size call
3657 below can fail for, e.g., XFmode. */
3658 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3659 return 0;
3661 /* We are interested in the bare arrangement of bits, so strip everything
3662 that doesn't affect the machine mode. However, record the type of the
3663 outermost expression if it may matter below. */
3664 if (CONVERT_EXPR_P (exp)
3665 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3666 outer_type = TREE_TYPE (exp);
3667 STRIP_NOPS (exp);
3669 if (TREE_CODE (exp) == BIT_AND_EXPR)
3671 and_mask = TREE_OPERAND (exp, 1);
3672 exp = TREE_OPERAND (exp, 0);
3673 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3674 if (TREE_CODE (and_mask) != INTEGER_CST)
3675 return 0;
3678 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3679 punsignedp, pvolatilep, false);
3680 if ((inner == exp && and_mask == 0)
3681 || *pbitsize < 0 || offset != 0
3682 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3683 return 0;
3685 /* If the number of bits in the reference is the same as the bitsize of
3686 the outer type, then the outer type gives the signedness. Otherwise
3687 (in case of a small bitfield) the signedness is unchanged. */
3688 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3689 *punsignedp = TYPE_UNSIGNED (outer_type);
3691 /* Compute the mask to access the bitfield. */
3692 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3693 precision = TYPE_PRECISION (unsigned_type);
3695 mask = build_int_cst_type (unsigned_type, -1);
3697 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3698 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3700 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3701 if (and_mask != 0)
3702 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3703 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3705 *pmask = mask;
3706 *pand_mask = and_mask;
3707 return inner;
3710 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3711 bit positions. */
3713 static int
3714 all_ones_mask_p (const_tree mask, int size)
3716 tree type = TREE_TYPE (mask);
3717 unsigned int precision = TYPE_PRECISION (type);
3718 tree tmask;
3720 tmask = build_int_cst_type (signed_type_for (type), -1);
3722 return
3723 tree_int_cst_equal (mask,
3724 const_binop (RSHIFT_EXPR,
3725 const_binop (LSHIFT_EXPR, tmask,
3726 size_int (precision - size)),
3727 size_int (precision - size)));
3730 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3731 represents the sign bit of EXP's type. If EXP represents a sign
3732 or zero extension, also test VAL against the unextended type.
3733 The return value is the (sub)expression whose sign bit is VAL,
3734 or NULL_TREE otherwise. */
3736 static tree
3737 sign_bit_p (tree exp, const_tree val)
3739 unsigned HOST_WIDE_INT mask_lo, lo;
3740 HOST_WIDE_INT mask_hi, hi;
3741 int width;
3742 tree t;
3744 /* Tree EXP must have an integral type. */
3745 t = TREE_TYPE (exp);
3746 if (! INTEGRAL_TYPE_P (t))
3747 return NULL_TREE;
3749 /* Tree VAL must be an integer constant. */
3750 if (TREE_CODE (val) != INTEGER_CST
3751 || TREE_OVERFLOW (val))
3752 return NULL_TREE;
3754 width = TYPE_PRECISION (t);
3755 if (width > HOST_BITS_PER_WIDE_INT)
3757 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3758 lo = 0;
3760 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3761 mask_lo = -1;
3763 else
3765 hi = 0;
3766 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3768 mask_hi = 0;
3769 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3772 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3773 treat VAL as if it were unsigned. */
3774 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3775 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3776 return exp;
3778 /* Handle extension from a narrower type. */
3779 if (TREE_CODE (exp) == NOP_EXPR
3780 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3781 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3783 return NULL_TREE;
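/* Illustrative sketch, not part of GCC: the same test in plain C for
   widths up to 64 bits.  `is_sign_bit' is a hypothetical helper; it
   assumes 1 <= WIDTH <= 64.  */

static int ATTRIBUTE_UNUSED
is_sign_bit (unsigned long long val, int width)
{
  /* Mask off bits beyond WIDTH, as above, so VAL is treated as an
     unsigned WIDTH-bit quantity.  */
  unsigned long long mask = width == 64 ? ~0ULL : (1ULL << width) - 1;
  return (val & mask) == (1ULL << (width - 1));
}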
3786 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3787 to be evaluated unconditionally. */
3789 static int
3790 simple_operand_p (const_tree exp)
3792 /* Strip any conversions that don't change the machine mode. */
3793 STRIP_NOPS (exp);
3795 return (CONSTANT_CLASS_P (exp)
3796 || TREE_CODE (exp) == SSA_NAME
3797 || (DECL_P (exp)
3798 && ! TREE_ADDRESSABLE (exp)
3799 && ! TREE_THIS_VOLATILE (exp)
3800 && ! DECL_NONLOCAL (exp)
3801 /* Don't regard global variables as simple. They may be
3802 allocated in ways unknown to the compiler (shared memory,
3803 #pragma weak, etc). */
3804 && ! TREE_PUBLIC (exp)
3805 && ! DECL_EXTERNAL (exp)
3806 /* Weakrefs are not safe to be read, since they can be NULL.
3807 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3808 have DECL_WEAK flag set. */
3809 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3810 /* Loading a static variable is unduly expensive, but global
3811 registers aren't expensive. */
3812 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3815 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3816 to be evaluated unconditionally.
3817 In addition to simple_operand_p, we assume that comparisons, conversions,
3818 and logic-not operations are simple, if their operands are simple, too. */
3820 static bool
3821 simple_operand_p_2 (tree exp)
3823 enum tree_code code;
3825 if (TREE_SIDE_EFFECTS (exp)
3826 || tree_could_trap_p (exp))
3827 return false;
3829 while (CONVERT_EXPR_P (exp))
3830 exp = TREE_OPERAND (exp, 0);
3832 code = TREE_CODE (exp);
3834 if (TREE_CODE_CLASS (code) == tcc_comparison)
3835 return (simple_operand_p (TREE_OPERAND (exp, 0))
3836 && simple_operand_p (TREE_OPERAND (exp, 1)));
3838 if (code == TRUTH_NOT_EXPR)
3839 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3841 return simple_operand_p (exp);
3845 /* The following functions are subroutines to fold_range_test and allow it to
3846 try to change a logical combination of comparisons into a range test.
3848 For example, both
3849 X == 2 || X == 3 || X == 4 || X == 5
3850 and
3851 X >= 2 && X <= 5
3852 are converted to
3853 (unsigned) (X - 2) <= 3
3855 We describe each set of comparisons as being either inside or outside
3856 a range, using a variable named like IN_P, and then describe the
3857 range with a lower and upper bound. If one of the bounds is omitted,
3858 it represents either the highest or lowest value of the type.
3860 In the comments below, we represent a range by two numbers in brackets
3861 preceded by a "+" to designate being inside that range, or a "-" to
3862 designate being outside that range, so the condition can be inverted by
3863 flipping the prefix. An omitted bound is represented by a "-". For
3864 example, "- [-, 10]" means being outside the range starting at the lowest
3865 possible value and ending at 10, in other words, being greater than 10.
3866 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3867 always false.
3869 We set up things so that the missing bounds are handled in a consistent
3870 manner so neither a missing bound nor "true" and "false" need to be
3871 handled using a special case. */
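/* Illustrative sketch, not part of GCC: the equivalence quoted above,
   written out in plain C.  `in_2_to_5' is a hypothetical name used only
   for exposition.  */

static int ATTRIBUTE_UNUSED
in_2_to_5 (int x)
{
  /* X >= 2 && X <= 5 collapses to a single unsigned comparison: the
     subtraction is done unsigned, so values below 2 wrap around to huge
     numbers and fail the <= test just as values above 5 do.  */
  return (unsigned int) x - 2U <= 3U;
}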
3873 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3874 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3875 and UPPER1_P are nonzero if the respective argument is an upper bound
3876 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3877 must be specified for a comparison. ARG1 will be converted to ARG0's
3878 type if both are specified. */
3880 static tree
3881 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3882 tree arg1, int upper1_p)
3884 tree tem;
3885 int result;
3886 int sgn0, sgn1;
3888 /* If neither arg represents infinity, do the normal operation.
3889 Else, if not a comparison, return infinity. Else handle the special
3890 comparison rules. Note that most of the cases below won't occur, but
3891 are handled for consistency. */
3893 if (arg0 != 0 && arg1 != 0)
3895 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3896 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3897 STRIP_NOPS (tem);
3898 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3901 if (TREE_CODE_CLASS (code) != tcc_comparison)
3902 return 0;
3904 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3905 for neither. In real maths, we cannot assume open-ended ranges are
3906 the same. But, this is computer arithmetic, where numbers are finite.
3907 We can therefore substitute for any unbounded bound the value Z,
3908 Z being greater than any representable number. This permits
3909 us to treat unbounded ranges as equal. */
3910 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3911 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3912 switch (code)
3914 case EQ_EXPR:
3915 result = sgn0 == sgn1;
3916 break;
3917 case NE_EXPR:
3918 result = sgn0 != sgn1;
3919 break;
3920 case LT_EXPR:
3921 result = sgn0 < sgn1;
3922 break;
3923 case LE_EXPR:
3924 result = sgn0 <= sgn1;
3925 break;
3926 case GT_EXPR:
3927 result = sgn0 > sgn1;
3928 break;
3929 case GE_EXPR:
3930 result = sgn0 >= sgn1;
3931 break;
3932 default:
3933 gcc_unreachable ();
3936 return constant_boolean_node (result, type);
3939 /* Helper routine for make_range. Perform one step for it, return
3940 new expression if the loop should continue or NULL_TREE if it should
3941 stop. */
3943 tree
3944 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3945 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3946 bool *strict_overflow_p)
3948 tree arg0_type = TREE_TYPE (arg0);
3949 tree n_low, n_high, low = *p_low, high = *p_high;
3950 int in_p = *p_in_p, n_in_p;
3952 switch (code)
3954 case TRUTH_NOT_EXPR:
3955 /* We can only do something if the range is testing for zero. */
3956 if (low == NULL_TREE || high == NULL_TREE
3957 || ! integer_zerop (low) || ! integer_zerop (high))
3958 return NULL_TREE;
3959 *p_in_p = ! in_p;
3960 return arg0;
3962 case EQ_EXPR: case NE_EXPR:
3963 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3964 /* We can only do something if the range is testing for zero
3965 and if the second operand is an integer constant. Note that
3966 saying something is "in" the range we make is done by
3967 complementing IN_P since it will set in the initial case of
3968 being not equal to zero; "out" is leaving it alone. */
3969 if (low == NULL_TREE || high == NULL_TREE
3970 || ! integer_zerop (low) || ! integer_zerop (high)
3971 || TREE_CODE (arg1) != INTEGER_CST)
3972 return NULL_TREE;
3974 switch (code)
3976 case NE_EXPR: /* - [c, c] */
3977 low = high = arg1;
3978 break;
3979 case EQ_EXPR: /* + [c, c] */
3980 in_p = ! in_p, low = high = arg1;
3981 break;
3982 case GT_EXPR: /* - [-, c] */
3983 low = 0, high = arg1;
3984 break;
3985 case GE_EXPR: /* + [c, -] */
3986 in_p = ! in_p, low = arg1, high = 0;
3987 break;
3988 case LT_EXPR: /* - [c, -] */
3989 low = arg1, high = 0;
3990 break;
3991 case LE_EXPR: /* + [-, c] */
3992 in_p = ! in_p, low = 0, high = arg1;
3993 break;
3994 default:
3995 gcc_unreachable ();
3998 /* If this is an unsigned comparison, we also know that EXP is
3999 greater than or equal to zero. We base the range tests we make
4000 on that fact, so we record it here so we can parse existing
4001 range tests. We test arg0_type since often the return type
4002 of, e.g. EQ_EXPR, is boolean. */
4003 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4005 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4006 in_p, low, high, 1,
4007 build_int_cst (arg0_type, 0),
4008 NULL_TREE))
4009 return NULL_TREE;
4011 in_p = n_in_p, low = n_low, high = n_high;
4013 /* If the high bound is missing, but we have a nonzero low
4014 bound, reverse the range so it goes from zero to the low bound
4015 minus 1. */
4016 if (high == 0 && low && ! integer_zerop (low))
4018 in_p = ! in_p;
4019 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4020 integer_one_node, 0);
4021 low = build_int_cst (arg0_type, 0);
4025 *p_low = low;
4026 *p_high = high;
4027 *p_in_p = in_p;
4028 return arg0;
4030 case NEGATE_EXPR:
4031 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4032 low and high are non-NULL, then normalize will DTRT. */
4033 if (!TYPE_UNSIGNED (arg0_type)
4034 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4036 if (low == NULL_TREE)
4037 low = TYPE_MIN_VALUE (arg0_type);
4038 if (high == NULL_TREE)
4039 high = TYPE_MAX_VALUE (arg0_type);
4042 /* (-x) IN [a,b] -> x in [-b, -a] */
4043 n_low = range_binop (MINUS_EXPR, exp_type,
4044 build_int_cst (exp_type, 0),
4045 0, high, 1);
4046 n_high = range_binop (MINUS_EXPR, exp_type,
4047 build_int_cst (exp_type, 0),
4048 0, low, 0);
4049 if (n_high != 0 && TREE_OVERFLOW (n_high))
4050 return NULL_TREE;
4051 goto normalize;
4053 case BIT_NOT_EXPR:
4054 /* ~ X -> -X - 1 */
4055 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4056 build_int_cst (exp_type, 1));
4058 case PLUS_EXPR:
4059 case MINUS_EXPR:
4060 if (TREE_CODE (arg1) != INTEGER_CST)
4061 return NULL_TREE;
4063 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4064 move a constant to the other side. */
4065 if (!TYPE_UNSIGNED (arg0_type)
4066 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4067 return NULL_TREE;
4069 /* If EXP is signed, any overflow in the computation is undefined,
4070 so we don't worry about it so long as our computations on
4071 the bounds don't overflow. For unsigned, overflow is defined
4072 and this is exactly the right thing. */
4073 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 arg0_type, low, 0, arg1, 0);
4075 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4076 arg0_type, high, 1, arg1, 0);
4077 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4078 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4079 return NULL_TREE;
4081 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4082 *strict_overflow_p = true;
4084 normalize:
4085 /* Check for an unsigned range which has wrapped around the maximum
4086 value thus making n_high < n_low, and normalize it. */
4087 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4089 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4090 integer_one_node, 0);
4091 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4092 integer_one_node, 0);
4094 /* If the range is of the form +/- [ x+1, x ], we won't
4095 be able to normalize it. But then, it represents the
4096 whole range or the empty set, so make it
4097 +/- [ -, - ]. */
4098 if (tree_int_cst_equal (n_low, low)
4099 && tree_int_cst_equal (n_high, high))
4100 low = high = 0;
4101 else
4102 in_p = ! in_p;
4104 else
4105 low = n_low, high = n_high;
4107 *p_low = low;
4108 *p_high = high;
4109 *p_in_p = in_p;
4110 return arg0;
4112 CASE_CONVERT:
4113 case NON_LVALUE_EXPR:
4114 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4115 return NULL_TREE;
4117 if (! INTEGRAL_TYPE_P (arg0_type)
4118 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4119 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4120 return NULL_TREE;
4122 n_low = low, n_high = high;
4124 if (n_low != 0)
4125 n_low = fold_convert_loc (loc, arg0_type, n_low);
4127 if (n_high != 0)
4128 n_high = fold_convert_loc (loc, arg0_type, n_high);
4130 /* If we're converting arg0, which has an unsigned type, to the
4131 signed type of exp, we will be doing the comparison as unsigned.
4132 The tests above have already verified that LOW and HIGH
4133 are both positive.
4135 So we have to ensure that we will handle large unsigned
4136 values the same way that the current signed bounds treat
4137 negative values. */
4139 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4141 tree high_positive;
4142 tree equiv_type;
4143 /* For fixed-point modes, we need to pass the saturating flag
4144 as the 2nd parameter. */
4145 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4146 equiv_type
4147 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4148 TYPE_SATURATING (arg0_type));
4149 else
4150 equiv_type
4151 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4153 /* A range without an upper bound is, naturally, unbounded.
4154 Since convert would have cropped a very large value, use
4155 the max value for the destination type. */
4156 high_positive
4157 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4158 : TYPE_MAX_VALUE (arg0_type);
4160 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4161 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4162 fold_convert_loc (loc, arg0_type,
4163 high_positive),
4164 build_int_cst (arg0_type, 1));
4166 /* If the low bound is specified, "and" the range with the
4167 range for which the original unsigned value will be
4168 positive. */
4169 if (low != 0)
4171 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4172 1, fold_convert_loc (loc, arg0_type,
4173 integer_zero_node),
4174 high_positive))
4175 return NULL_TREE;
4177 in_p = (n_in_p == in_p);
4179 else
4181 /* Otherwise, "or" the range with the range of the input
4182 that will be interpreted as negative. */
4183 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4184 1, fold_convert_loc (loc, arg0_type,
4185 integer_zero_node),
4186 high_positive))
4187 return NULL_TREE;
4189 in_p = (in_p != n_in_p);
4193 *p_low = n_low;
4194 *p_high = n_high;
4195 *p_in_p = in_p;
4196 return arg0;
4198 default:
4199 return NULL_TREE;
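/* Illustrative sketch, not part of GCC: the BIT_NOT_EXPR case above uses
   the identity ~X == -X - 1, checked here in plain C on an unsigned
   operand, where the arithmetic is defined to wrap.  */

static int ATTRIBUTE_UNUSED
bit_not_identity (unsigned int x)
{
  /* In modular arithmetic, ~X and -X - 1 are the same value.  */
  return ~x == -x - 1U;
}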
4203 /* Given EXP, a logical expression, set the range it is testing into
4204 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4205 actually being tested. *PLOW and *PHIGH will be made of the same
4206 type as the returned expression. If EXP is not a comparison, we
4207 will most likely not be returning a useful value and range. Set
4208 *STRICT_OVERFLOW_P to true if the return value is only valid
4209 because signed overflow is undefined; otherwise, do not change
4210 *STRICT_OVERFLOW_P. */
4212 tree
4213 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4214 bool *strict_overflow_p)
4216 enum tree_code code;
4217 tree arg0, arg1 = NULL_TREE;
4218 tree exp_type, nexp;
4219 int in_p;
4220 tree low, high;
4221 location_t loc = EXPR_LOCATION (exp);
4223 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4224 and see if we can refine the range. Some of the cases below may not
4225 happen, but it doesn't seem worth worrying about this. We "continue"
4226 the outer loop when we've changed something; otherwise we "break"
4227 the switch, which will "break" the while. */
4229 in_p = 0;
4230 low = high = build_int_cst (TREE_TYPE (exp), 0);
4232 while (1)
4234 code = TREE_CODE (exp);
4235 exp_type = TREE_TYPE (exp);
4236 arg0 = NULL_TREE;
4238 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4240 if (TREE_OPERAND_LENGTH (exp) > 0)
4241 arg0 = TREE_OPERAND (exp, 0);
4242 if (TREE_CODE_CLASS (code) == tcc_binary
4243 || TREE_CODE_CLASS (code) == tcc_comparison
4244 || (TREE_CODE_CLASS (code) == tcc_expression
4245 && TREE_OPERAND_LENGTH (exp) > 1))
4246 arg1 = TREE_OPERAND (exp, 1);
4248 if (arg0 == NULL_TREE)
4249 break;
4251 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4252 &high, &in_p, strict_overflow_p);
4253 if (nexp == NULL_TREE)
4254 break;
4255 exp = nexp;
4258 /* If EXP is a constant, we can evaluate whether this is true or false. */
4259 if (TREE_CODE (exp) == INTEGER_CST)
4261 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4262 exp, 0, low, 0))
4263 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4264 exp, 1, high, 1)));
4265 low = high = 0;
4266 exp = 0;
4269 *pin_p = in_p, *plow = low, *phigh = high;
4270 return exp;
4273 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4274 type, TYPE, return an expression to test if EXP is in (or out of, depending
4275 on IN_P) the range. Return 0 if the test couldn't be created. */
4277 tree
4278 build_range_check (location_t loc, tree type, tree exp, int in_p,
4279 tree low, tree high)
4281 tree etype = TREE_TYPE (exp), value;
4283 #ifdef HAVE_canonicalize_funcptr_for_compare
4284 /* Disable this optimization for function pointer expressions
4285 on targets that require function pointer canonicalization. */
4286 if (HAVE_canonicalize_funcptr_for_compare
4287 && TREE_CODE (etype) == POINTER_TYPE
4288 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4289 return NULL_TREE;
4290 #endif
4292 if (! in_p)
4294 value = build_range_check (loc, type, exp, 1, low, high);
4295 if (value != 0)
4296 return invert_truthvalue_loc (loc, value);
4298 return 0;
4301 if (low == 0 && high == 0)
4302 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4304 if (low == 0)
4305 return fold_build2_loc (loc, LE_EXPR, type, exp,
4306 fold_convert_loc (loc, etype, high));
4308 if (high == 0)
4309 return fold_build2_loc (loc, GE_EXPR, type, exp,
4310 fold_convert_loc (loc, etype, low));
4312 if (operand_equal_p (low, high, 0))
4313 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4314 fold_convert_loc (loc, etype, low));
4316 if (integer_zerop (low))
4318 if (! TYPE_UNSIGNED (etype))
4320 etype = unsigned_type_for (etype);
4321 high = fold_convert_loc (loc, etype, high);
4322 exp = fold_convert_loc (loc, etype, exp);
4324 return build_range_check (loc, type, exp, 1, 0, high);
4327 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4328 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4330 unsigned HOST_WIDE_INT lo;
4331 HOST_WIDE_INT hi;
4332 int prec;
4334 prec = TYPE_PRECISION (etype);
4335 if (prec <= HOST_BITS_PER_WIDE_INT)
4337 hi = 0;
4338 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4340 else
4342 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4343 lo = HOST_WIDE_INT_M1U;
4346 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4348 if (TYPE_UNSIGNED (etype))
4350 tree signed_etype = signed_type_for (etype);
4351 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4352 etype
4353 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4354 else
4355 etype = signed_etype;
4356 exp = fold_convert_loc (loc, etype, exp);
4358 return fold_build2_loc (loc, GT_EXPR, type, exp,
4359 build_int_cst (etype, 0));
4363 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4364 This requires wrap-around arithmetic for the type of the expression.
4365 First make sure that arithmetic in this type is valid, then make sure
4366 that it wraps around. */
4367 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4368 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4369 TYPE_UNSIGNED (etype));
4371 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4373 tree utype, minv, maxv;
4375 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4376 for the type in question, as we rely on this here. */
4377 utype = unsigned_type_for (etype);
4378 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4379 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4380 integer_one_node, 1);
4381 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4383 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4384 minv, 1, maxv, 1)))
4385 etype = utype;
4386 else
4387 return 0;
4390 high = fold_convert_loc (loc, etype, high);
4391 low = fold_convert_loc (loc, etype, low);
4392 exp = fold_convert_loc (loc, etype, exp);
4394 value = const_binop (MINUS_EXPR, high, low);
4397 if (POINTER_TYPE_P (etype))
4399 if (value != 0 && !TREE_OVERFLOW (value))
4401 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4402 return build_range_check (loc, type,
4403 fold_build_pointer_plus_loc (loc, exp, low),
4404 1, build_int_cst (etype, 0), value);
4406 return 0;
4409 if (value != 0 && !TREE_OVERFLOW (value))
4410 return build_range_check (loc, type,
4411 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4412 1, build_int_cst (etype, 0), value);
4414 return 0;
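/* Illustrative sketch, not part of GCC: the (c >= 1 && c <= 127) special
   case above, written in plain C.  `byte_in_1_127' is a hypothetical
   helper; it assumes an 8-bit, two's complement "signed char".  */

static int ATTRIBUTE_UNUSED
byte_in_1_127 (unsigned char c)
{
  int pair = c >= 1 && c <= 127;	/* two comparisons */
  int single = (signed char) c > 0;	/* one signed comparison */
  return pair == single;		/* always 1 */
}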
4417 /* Return the predecessor of VAL in its type, handling the infinite case. */
4419 static tree
4420 range_predecessor (tree val)
4422 tree type = TREE_TYPE (val);
4424 if (INTEGRAL_TYPE_P (type)
4425 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4426 return 0;
4427 else
4428 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4431 /* Return the successor of VAL in its type, handling the infinite case. */
4433 static tree
4434 range_successor (tree val)
4436 tree type = TREE_TYPE (val);
4438 if (INTEGRAL_TYPE_P (type)
4439 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4440 return 0;
4441 else
4442 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
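/* Illustrative sketch, not part of GCC: the successor computation with
   its infinite case, mirrored in plain C for unsigned char.
   `uchar_successor' is hypothetical; a zero return means the bound has
   no successor and becomes a missing (infinite) one.  */

static int ATTRIBUTE_UNUSED
uchar_successor (unsigned char v, unsigned char *succ)
{
  if (v == UCHAR_MAX)
    return 0;			/* no successor: bound becomes missing */
  *succ = (unsigned char) (v + 1);
  return 1;
}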
4445 /* Given two ranges, see if we can merge them into one. Return 1 if we
4446 can, 0 if we can't. Set the output range into the specified parameters. */
4448 bool
4449 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4450 tree high0, int in1_p, tree low1, tree high1)
4452 int no_overlap;
4453 int subset;
4454 int temp;
4455 tree tem;
4456 int in_p;
4457 tree low, high;
4458 int lowequal = ((low0 == 0 && low1 == 0)
4459 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4460 low0, 0, low1, 0)));
4461 int highequal = ((high0 == 0 && high1 == 0)
4462 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4463 high0, 1, high1, 1)));
4465 /* Make range 0 be the range that starts first, or ends last if they
4466 start at the same value. Swap them if it isn't. */
4467 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4468 low0, 0, low1, 0))
4469 || (lowequal
4470 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4471 high1, 1, high0, 1))))
4473 temp = in0_p, in0_p = in1_p, in1_p = temp;
4474 tem = low0, low0 = low1, low1 = tem;
4475 tem = high0, high0 = high1, high1 = tem;
4478 /* Now flag two cases, whether the ranges are disjoint or whether the
4479 second range is totally subsumed in the first. Note that the tests
4480 below are simplified by the ones above. */
4481 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4482 high0, 1, low1, 0));
4483 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4484 high1, 1, high0, 1));
4486 /* We now have four cases, depending on whether we are including or
4487 excluding the two ranges. */
4488 if (in0_p && in1_p)
4490 /* If they don't overlap, the result is false. If the second range
4491 is a subset it is the result. Otherwise, the range is from the start
4492 of the second to the end of the first. */
4493 if (no_overlap)
4494 in_p = 0, low = high = 0;
4495 else if (subset)
4496 in_p = 1, low = low1, high = high1;
4497 else
4498 in_p = 1, low = low1, high = high0;
4501 else if (in0_p && ! in1_p)
4503 /* If they don't overlap, the result is the first range. If they are
4504 equal, the result is false. If the second range is a subset of the
4505 first, and the ranges begin at the same place, we go from just after
4506 the end of the second range to the end of the first. If the second
4507 range is not a subset of the first, or if it is a subset and both
4508 ranges end at the same place, the range starts at the start of the
4509 first range and ends just before the second range.
4510 Otherwise, we can't describe this as a single range. */
4511 if (no_overlap)
4512 in_p = 1, low = low0, high = high0;
4513 else if (lowequal && highequal)
4514 in_p = 0, low = high = 0;
4515 else if (subset && lowequal)
4517 low = range_successor (high1);
4518 high = high0;
4519 in_p = 1;
4520 if (low == 0)
4522 /* We are in the weird situation where high0 > high1 but
4523 high1 has no successor. Punt. */
4524 return 0;
4527 else if (! subset || highequal)
4529 low = low0;
4530 high = range_predecessor (low1);
4531 in_p = 1;
4532 if (high == 0)
4534 /* low0 < low1 but low1 has no predecessor. Punt. */
4535 return 0;
4538 else
4539 return 0;
4542 else if (! in0_p && in1_p)
4544 /* If they don't overlap, the result is the second range. If the second
4545 is a subset of the first, the result is false. Otherwise,
4546 the range starts just after the first range and ends at the
4547 end of the second. */
4548 if (no_overlap)
4549 in_p = 1, low = low1, high = high1;
4550 else if (subset || highequal)
4551 in_p = 0, low = high = 0;
4552 else
4554 low = range_successor (high0);
4555 high = high1;
4556 in_p = 1;
4557 if (low == 0)
4559 /* high1 > high0 but high0 has no successor. Punt. */
4560 return 0;
4565 else
4567 /* The case where we are excluding both ranges. Here the complex case
4568 is if they don't overlap. In that case, the only time we have a
4569 range is if they are adjacent. If the second is a subset of the
4570 first, the result is the first. Otherwise, the range to exclude
4571 starts at the beginning of the first range and ends at the end of the
4572 second. */
4573 if (no_overlap)
4575 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4576 range_successor (high0),
4577 1, low1, 0)))
4578 in_p = 0, low = low0, high = high1;
4579 else
4581 /* Canonicalize - [min, x] into - [-, x]. */
4582 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4583 switch (TREE_CODE (TREE_TYPE (low0)))
4585 case ENUMERAL_TYPE:
4586 if (TYPE_PRECISION (TREE_TYPE (low0))
4587 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4588 break;
4589 /* FALLTHROUGH */
4590 case INTEGER_TYPE:
4591 if (tree_int_cst_equal (low0,
4592 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4593 low0 = 0;
4594 break;
4595 case POINTER_TYPE:
4596 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4597 && integer_zerop (low0))
4598 low0 = 0;
4599 break;
4600 default:
4601 break;
4604 /* Canonicalize - [x, max] into - [x, -]. */
4605 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4606 switch (TREE_CODE (TREE_TYPE (high1)))
4608 case ENUMERAL_TYPE:
4609 if (TYPE_PRECISION (TREE_TYPE (high1))
4610 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4611 break;
4612 /* FALLTHROUGH */
4613 case INTEGER_TYPE:
4614 if (tree_int_cst_equal (high1,
4615 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4616 high1 = 0;
4617 break;
4618 case POINTER_TYPE:
4619 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4620 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4621 high1, 1,
4622 integer_one_node, 1)))
4623 high1 = 0;
4624 break;
4625 default:
4626 break;
4629 /* The ranges might be also adjacent between the maximum and
4630 minimum values of the given type. For
4631 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4632 return + [x + 1, y - 1]. */
4633 if (low0 == 0 && high1 == 0)
4635 low = range_successor (high0);
4636 high = range_predecessor (low1);
4637 if (low == 0 || high == 0)
4638 return 0;
4640 in_p = 1;
4642 else
4643 return 0;
4646 else if (subset)
4647 in_p = 0, low = low0, high = high0;
4648 else
4649 in_p = 0, low = low0, high = high1;
4652 *pin_p = in_p, *plow = low, *phigh = high;
4653 return 1;
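/* Illustrative sketch, not part of GCC: the IN0_P && IN1_P branch above
   computes an intersection.  `intersect_ranges' is a hypothetical helper
   restricted to finite int bounds, so no missing-bound handling.  */

static int ATTRIBUTE_UNUSED
intersect_ranges (int low0, int high0, int low1, int high1,
		  int *plow, int *phigh)
{
  int t;

  /* Make range 0 the one that starts first, or ends last on a tie,
     exactly as above.  */
  if (low0 > low1 || (low0 == low1 && high1 > high0))
    {
      t = low0, low0 = low1, low1 = t;
      t = high0, high0 = high1, high1 = t;
    }

  if (high0 < low1)
    return 0;				/* disjoint: the result is "false" */
  *plow = low1;				/* start of the second range */
  *phigh = high1 <= high0 ? high1 : high0;	/* subset, else overlap */
  return 1;
}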
4657 /* Subroutine of fold, looking inside expressions of the form
4658 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4659 of the COND_EXPR. This function is being used also to optimize
4660 A op B ? C : A, by reversing the comparison first.
4662 Return a folded expression whose code is not a COND_EXPR
4663 anymore, or NULL_TREE if no folding opportunity is found. */
4665 static tree
4666 fold_cond_expr_with_comparison (location_t loc, tree type,
4667 tree arg0, tree arg1, tree arg2)
4669 enum tree_code comp_code = TREE_CODE (arg0);
4670 tree arg00 = TREE_OPERAND (arg0, 0);
4671 tree arg01 = TREE_OPERAND (arg0, 1);
4672 tree arg1_type = TREE_TYPE (arg1);
4673 tree tem;
4675 STRIP_NOPS (arg1);
4676 STRIP_NOPS (arg2);
4678 /* If we have A op 0 ? A : -A, consider applying the following
4679 transformations:
4681 A == 0? A : -A same as -A
4682 A != 0? A : -A same as A
4683 A >= 0? A : -A same as abs (A)
4684 A > 0? A : -A same as abs (A)
4685 A <= 0? A : -A same as -abs (A)
4686 A < 0? A : -A same as -abs (A)
4688 None of these transformations work for modes with signed
4689 zeros. If A is +/-0, the first two transformations will
4690 change the sign of the result (from +0 to -0, or vice
4691 versa). The last four will fix the sign of the result,
4692 even though the original expressions could be positive or
4693 negative, depending on the sign of A.
4695 Note that all these transformations are correct if A is
4696 NaN, since the two alternatives (A and -A) are also NaNs. */
4697 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4698 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4699 ? real_zerop (arg01)
4700 : integer_zerop (arg01))
4701 && ((TREE_CODE (arg2) == NEGATE_EXPR
4702 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4703 /* In the case that A is of the form X-Y, '-A' (arg2) may
4704 have already been folded to Y-X, check for that. */
4705 || (TREE_CODE (arg1) == MINUS_EXPR
4706 && TREE_CODE (arg2) == MINUS_EXPR
4707 && operand_equal_p (TREE_OPERAND (arg1, 0),
4708 TREE_OPERAND (arg2, 1), 0)
4709 && operand_equal_p (TREE_OPERAND (arg1, 1),
4710 TREE_OPERAND (arg2, 0), 0))))
4711 switch (comp_code)
4713 case EQ_EXPR:
4714 case UNEQ_EXPR:
4715 tem = fold_convert_loc (loc, arg1_type, arg1);
4716 return pedantic_non_lvalue_loc (loc,
4717 fold_convert_loc (loc, type,
4718 negate_expr (tem)));
4719 case NE_EXPR:
4720 case LTGT_EXPR:
4721 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (flag_trapping_math)
4725 break;
4726 /* Fall through. */
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert_loc (loc, signed_type_for
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4734 case UNLE_EXPR:
4735 case UNLT_EXPR:
4736 if (flag_trapping_math)
4737 break;
4738 case LE_EXPR:
4739 case LT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert_loc (loc, signed_type_for
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert_loc (loc, type, tem));
4745 default:
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4747 break;
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4755 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4756 && integer_zerop (arg01) && integer_zerop (arg2))
4758 if (comp_code == NE_EXPR)
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 else if (comp_code == EQ_EXPR)
4761 return build_zero_cst (type);
4764 /* Try some transformations of A op B ? A : B.
4766 A == B? A : B same as B
4767 A != B? A : B same as A
4768 A >= B? A : B same as max (A, B)
4769 A > B? A : B same as max (B, A)
4770 A <= B? A : B same as min (A, B)
4771 A < B? A : B same as min (B, A)
4773 As above, these transformations don't work in the presence
4774 of signed zeros. For example, if A and B are zeros of
4775 opposite sign, the first two transformations will change
4776 the sign of the result. In the last four, the original
4777 expressions give different results for (A=+0, B=-0) and
4778 (A=-0, B=+0), but the transformed expressions do not.
4780 The first two transformations are correct if either A or B
4781 is a NaN. In the first transformation, the condition will
4782 be false, and B will indeed be chosen. In the case of the
4783 second transformation, the condition A != B will be true,
4784 and A will be chosen.
4786 The conversions to max() and min() are not correct if B is
4787 a number and A is not. The conditions in the original
4788 expressions will be false, so all four give B. The min()
4789 and max() versions would give a NaN instead. */
4790 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4791 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4792 /* Avoid these transformations if the COND_EXPR may be used
4793 as an lvalue in the C++ front-end. PR c++/19199. */
4794 && (in_gimple_form
4795 || VECTOR_TYPE_P (type)
4796 || (strcmp (lang_hooks.name, "GNU C++") != 0
4797 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4798 || ! maybe_lvalue_p (arg1)
4799 || ! maybe_lvalue_p (arg2)))
4801 tree comp_op0 = arg00;
4802 tree comp_op1 = arg01;
4803 tree comp_type = TREE_TYPE (comp_op0);
4805 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4806 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4808 comp_type = type;
4809 comp_op0 = arg1;
4810 comp_op1 = arg2;
4813 switch (comp_code)
4815 case EQ_EXPR:
4816 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4817 case NE_EXPR:
4818 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4819 case LE_EXPR:
4820 case LT_EXPR:
4821 case UNLE_EXPR:
4822 case UNLT_EXPR:
4823 /* In C++ a ?: expression can be an lvalue, so put the
4824 operand which will be used if they are equal first
4825 so that we can convert this back to the
4826 corresponding COND_EXPR. */
4827 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4829 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4830 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4831 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4832 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4833 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4834 comp_op1, comp_op0);
4835 return pedantic_non_lvalue_loc (loc,
4836 fold_convert_loc (loc, type, tem));
4838 break;
4839 case GE_EXPR:
4840 case GT_EXPR:
4841 case UNGE_EXPR:
4842 case UNGT_EXPR:
4843 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4845 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4846 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4847 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4848 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4849 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4850 comp_op1, comp_op0);
4851 return pedantic_non_lvalue_loc (loc,
4852 fold_convert_loc (loc, type, tem));
4854 break;
4855 case UNEQ_EXPR:
4856 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4857 return pedantic_non_lvalue_loc (loc,
4858 fold_convert_loc (loc, type, arg2));
4859 break;
4860 case LTGT_EXPR:
4861 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, arg1));
4864 break;
4865 default:
4866 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4867 break;
4871 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4872 we might still be able to simplify this. For example,
4873 if C1 is one less or one more than C2, this might have started
4874 out as a MIN or MAX and been transformed by this function.
4875 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4877 if (INTEGRAL_TYPE_P (type)
4878 && TREE_CODE (arg01) == INTEGER_CST
4879 && TREE_CODE (arg2) == INTEGER_CST)
4880 switch (comp_code)
4882 case EQ_EXPR:
4883 if (TREE_CODE (arg1) == INTEGER_CST)
4884 break;
4885 /* We can replace A with C1 in this case. */
4886 arg1 = fold_convert_loc (loc, type, arg01);
4887 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4889 case LT_EXPR:
4890 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4891 MIN_EXPR, to preserve the signedness of the comparison. */
4892 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (PLUS_EXPR, arg2,
4896 build_int_cst (type, 1)),
4897 OEP_ONLY_CONST))
4899 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4900 fold_convert_loc (loc, TREE_TYPE (arg00),
4901 arg2));
4902 return pedantic_non_lvalue_loc (loc,
4903 fold_convert_loc (loc, type, tem));
4905 break;
4907 case LE_EXPR:
4908 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4909 as above. */
4910 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4911 OEP_ONLY_CONST)
4912 && operand_equal_p (arg01,
4913 const_binop (MINUS_EXPR, arg2,
4914 build_int_cst (type, 1)),
4915 OEP_ONLY_CONST))
4917 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4918 fold_convert_loc (loc, TREE_TYPE (arg00),
4919 arg2));
4920 return pedantic_non_lvalue_loc (loc,
4921 fold_convert_loc (loc, type, tem));
4923 break;
4925 case GT_EXPR:
4926 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4927 MAX_EXPR, to preserve the signedness of the comparison. */
4928 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4929 OEP_ONLY_CONST)
4930 && operand_equal_p (arg01,
4931 const_binop (MINUS_EXPR, arg2,
4932 build_int_cst (type, 1)),
4933 OEP_ONLY_CONST))
4935 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4936 fold_convert_loc (loc, TREE_TYPE (arg00),
4937 arg2));
4938 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4940 break;
4942 case GE_EXPR:
4943 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4944 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4945 OEP_ONLY_CONST)
4946 && operand_equal_p (arg01,
4947 const_binop (PLUS_EXPR, arg2,
4948 build_int_cst (type, 1)),
4949 OEP_ONLY_CONST))
4951 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4952 fold_convert_loc (loc, TREE_TYPE (arg00),
4953 arg2));
4954 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4956 break;
4957 case NE_EXPR:
4958 break;
4959 default:
4960 gcc_unreachable ();
4963 return NULL_TREE;
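/* Illustrative sketch, not part of GCC: two of the shapes recognized
   above, written for plain ints, where no signed zeros or NaNs exist.
   The names are hypothetical.  */

static int ATTRIBUTE_UNUSED
cond_as_abs (int a)
{
  return a >= 0 ? a : -a;	/* A >= 0 ? A : -A folds to abs (A) */
}

static int ATTRIBUTE_UNUSED
cond_as_max (int a, int b)
{
  return a >= b ? a : b;	/* A >= B ? A : B folds to max (A, B) */
}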
4968 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4969 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4970 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4971 false) >= 2)
4972 #endif
4974 /* EXP is some logical combination of boolean tests. See if we can
4975 merge it into some range test. Return the new tree if so. */
4977 static tree
4978 fold_range_test (location_t loc, enum tree_code code, tree type,
4979 tree op0, tree op1)
4981 int or_op = (code == TRUTH_ORIF_EXPR
4982 || code == TRUTH_OR_EXPR);
4983 int in0_p, in1_p, in_p;
4984 tree low0, low1, low, high0, high1, high;
4985 bool strict_overflow_p = false;
4986 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4987 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4988 tree tem;
4989 const char * const warnmsg = G_("assuming signed overflow does not occur "
4990 "when simplifying range test");
4992 /* If this is an OR operation, invert both sides; we will invert
4993 again at the end. */
4994 if (or_op)
4995 in0_p = ! in0_p, in1_p = ! in1_p;
4997 /* If both expressions are the same, if we can merge the ranges, and we
4998 can build the range test, return it or it inverted. If one of the
4999 ranges is always true or always false, consider it to be the same
5000 expression as the other. */
5001 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5002 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5003 in1_p, low1, high1)
5004 && 0 != (tem = (build_range_check (loc, type,
5005 lhs != 0 ? lhs
5006 : rhs != 0 ? rhs : integer_zero_node,
5007 in_p, low, high))))
5009 if (strict_overflow_p)
5010 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5011 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5014 /* On machines where the branch cost is expensive, if this is a
5015 short-circuited branch and the underlying object on both sides
5016 is the same, make a non-short-circuit operation. */
5017 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5018 && lhs != 0 && rhs != 0
5019 && (code == TRUTH_ANDIF_EXPR
5020 || code == TRUTH_ORIF_EXPR)
5021 && operand_equal_p (lhs, rhs, 0))
5023 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5024 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5025 which cases we can't do this. */
5026 if (simple_operand_p (lhs))
5027 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5028 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5029 type, op0, op1);
5031 else if (!lang_hooks.decls.global_bindings_p ()
5032 && !CONTAINS_PLACEHOLDER_P (lhs))
5034 tree common = save_expr (lhs);
5036 if (0 != (lhs = build_range_check (loc, type, common,
5037 or_op ? ! in0_p : in0_p,
5038 low0, high0))
5039 && (0 != (rhs = build_range_check (loc, type, common,
5040 or_op ? ! in1_p : in1_p,
5041 low1, high1))))
5043 if (strict_overflow_p)
5044 fold_overflow_warning (warnmsg,
5045 WARN_STRICT_OVERFLOW_COMPARISON);
5046 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5047 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5048 type, lhs, rhs);
5053 return 0;
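/* Illustrative sketch, not part of GCC: the non-short-circuit rewrite
   above, in plain C.  Both operands are "simple" (no side effects, no
   traps), so evaluating the right-hand side unconditionally is safe.  */

static int ATTRIBUTE_UNUSED
non_short_circuit (int x)
{
  int branchy = x >= 2 && x <= 5;	/* TRUTH_ANDIF_EXPR: may branch */
  int branchless = (x >= 2) & (x <= 5);	/* TRUTH_AND_EXPR: no branch */
  return branchy == branchless;		/* always 1 */
}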
5056 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5057 bit value. Arrange things so the extra bits will be set to zero if and
5058 only if C is sign-extended to its full width. If MASK is nonzero,
5059 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5061 static tree
5062 unextend (tree c, int p, int unsignedp, tree mask)
5064 tree type = TREE_TYPE (c);
5065 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5066 tree temp;
5068 if (p == modesize || unsignedp)
5069 return c;
5071 /* We work by getting just the sign bit into the low-order bit, then
5072 into the high-order bit, then sign-extend. We then XOR that value
5073 with C. */
5074 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5075 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5077 /* We must use a signed type in order to get an arithmetic right shift.
5078 However, we must also avoid introducing accidental overflows, so that
5079 a subsequent call to integer_zerop will work. Hence we must
5080 do the type conversion here. At this point, the constant is either
5081 zero or one, and the conversion to a signed type can never overflow.
5082 We could get an overflow if this conversion is done anywhere else. */
5083 if (TYPE_UNSIGNED (type))
5084 temp = fold_convert (signed_type_for (type), temp);
5086 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5087 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5088 if (mask != 0)
5089 temp = const_binop (BIT_AND_EXPR, temp,
5090 fold_convert (TREE_TYPE (c), mask));
5091 /* If necessary, convert the type back to match the type of C. */
5092 if (TYPE_UNSIGNED (type))
5093 temp = fold_convert (type, temp);
5095 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
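/* Illustrative sketch, not part of GCC: the sign-smearing XOR above, in
   plain C.  `sign_extend_field' is hypothetical; it assumes C holds a
   P-bit value in its low bits, with zeros above and 1 <= P < 32.  */

static unsigned int ATTRIBUTE_UNUSED
sign_extend_field (unsigned int c, int p)
{
  unsigned int sign = (c >> (p - 1)) & 1U;	/* the field's sign bit */
  unsigned int ext = sign ? ~0U << p : 0U;	/* bits P and above */
  return c ^ ext;				/* smear the sign upward */
}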
5098 /* For an expression that has the form
5099 (A && B) || ~B
5100 or
5101 (A || B) && ~B,
5102 we can drop one of the inner expressions and simplify to
5103 A || ~B
5104 or
5105 A && ~B
5106 LOC is the location of the resulting expression. OP is the inner
5107 logical operation; the left-hand side in the examples above, while CMPOP
5108 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5109 removing a condition that guards another, as in
5110 (A != NULL && A->...) || A == NULL
5111 which we must not transform. If RHS_ONLY is true, only eliminate the
5112 right-most operand of the inner logical operation. */
5114 static tree
5115 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5116 bool rhs_only)
5118 tree type = TREE_TYPE (cmpop);
5119 enum tree_code code = TREE_CODE (cmpop);
5120 enum tree_code truthop_code = TREE_CODE (op);
5121 tree lhs = TREE_OPERAND (op, 0);
5122 tree rhs = TREE_OPERAND (op, 1);
5123 tree orig_lhs = lhs, orig_rhs = rhs;
5124 enum tree_code rhs_code = TREE_CODE (rhs);
5125 enum tree_code lhs_code = TREE_CODE (lhs);
5126 enum tree_code inv_code;
5128 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5129 return NULL_TREE;
5131 if (TREE_CODE_CLASS (code) != tcc_comparison)
5132 return NULL_TREE;
5134 if (rhs_code == truthop_code)
5136 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5137 if (newrhs != NULL_TREE)
5139 rhs = newrhs;
5140 rhs_code = TREE_CODE (rhs);
5143 if (lhs_code == truthop_code && !rhs_only)
5145 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5146 if (newlhs != NULL_TREE)
5148 lhs = newlhs;
5149 lhs_code = TREE_CODE (lhs);
5153 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5154 if (inv_code == rhs_code
5155 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5156 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5157 return lhs;
5158 if (!rhs_only && inv_code == lhs_code
5159 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5160 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5161 return rhs;
5162 if (rhs != orig_rhs || lhs != orig_lhs)
5163 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5164 lhs, rhs);
5165 return NULL_TREE;
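/* Illustrative sketch, not part of GCC: the boolean identity behind the
   transformation above, checked in plain C by a hypothetical helper.  */

static int ATTRIBUTE_UNUSED
drop_opposite_arm (int a, int b)
{
  int full = (a && b) || !b;	/* the (A && B) || ~B shape */
  int simplified = a || !b;	/* one inner operand dropped */
  return full == simplified;	/* always 1 */
}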
5168 /* Find ways of folding logical expressions of LHS and RHS:
5169 Try to merge two comparisons to the same innermost item.
5170 Look for range tests like "ch >= '0' && ch <= '9'".
5171 Look for combinations of simple terms on machines with expensive branches
5172 and evaluate the RHS unconditionally.
5174 For example, if we have p->a == 2 && p->b == 4 and we can make an
5175 object large enough to span both A and B, we can do this with a comparison
5176 against the object ANDed with the a mask.
5178 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5179 operations to do this with one comparison.
5181 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5182 function and the one above.
5184 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5185 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5187 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5188 two operands.
5190 We return the simplified tree or 0 if no optimization is possible. */
5192 static tree
5193 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5194 tree lhs, tree rhs)
5196 /* If this is the "or" of two comparisons, we can do something if
5197 the comparisons are NE_EXPR. If this is the "and", we can do something
5198 if the comparisons are EQ_EXPR. I.e.,
5199 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5201 WANTED_CODE is this operation code. For single bit fields, we can
5202 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5203 comparison for one-bit fields. */
5205 enum tree_code wanted_code;
5206 enum tree_code lcode, rcode;
5207 tree ll_arg, lr_arg, rl_arg, rr_arg;
5208 tree ll_inner, lr_inner, rl_inner, rr_inner;
5209 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5210 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5211 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5212 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5213 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5214 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5215 enum machine_mode lnmode, rnmode;
5216 tree ll_mask, lr_mask, rl_mask, rr_mask;
5217 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5218 tree l_const, r_const;
5219 tree lntype, rntype, result;
5220 HOST_WIDE_INT first_bit, end_bit;
5221 int volatilep;
5223 /* Start by getting the comparison codes. Fail if anything is volatile.
5224 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5225 it were surrounded with a NE_EXPR. */
5227 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5228 return 0;
5230 lcode = TREE_CODE (lhs);
5231 rcode = TREE_CODE (rhs);
5233 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5235 lhs = build2 (NE_EXPR, truth_type, lhs,
5236 build_int_cst (TREE_TYPE (lhs), 0));
5237 lcode = NE_EXPR;
5240 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5242 rhs = build2 (NE_EXPR, truth_type, rhs,
5243 build_int_cst (TREE_TYPE (rhs), 0));
5244 rcode = NE_EXPR;
5247 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5248 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5249 return 0;
5251 ll_arg = TREE_OPERAND (lhs, 0);
5252 lr_arg = TREE_OPERAND (lhs, 1);
5253 rl_arg = TREE_OPERAND (rhs, 0);
5254 rr_arg = TREE_OPERAND (rhs, 1);
5256 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5257 if (simple_operand_p (ll_arg)
5258 && simple_operand_p (lr_arg))
5260 if (operand_equal_p (ll_arg, rl_arg, 0)
5261 && operand_equal_p (lr_arg, rr_arg, 0))
5263 result = combine_comparisons (loc, code, lcode, rcode,
5264 truth_type, ll_arg, lr_arg);
5265 if (result)
5266 return result;
5268 else if (operand_equal_p (ll_arg, rr_arg, 0)
5269 && operand_equal_p (lr_arg, rl_arg, 0))
5271 result = combine_comparisons (loc, code, lcode,
5272 swap_tree_comparison (rcode),
5273 truth_type, ll_arg, lr_arg);
5274 if (result)
5275 return result;
5279 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5280 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5282 /* If the RHS can be evaluated unconditionally and its operands are
5283 simple, it wins to evaluate the RHS unconditionally on machines
5284 with expensive branches. In this case, this isn't a comparison
5285 that can be merged. */
5287 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5288 false) >= 2
5289 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5290 && simple_operand_p (rl_arg)
5291 && simple_operand_p (rr_arg))
5293 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5294 if (code == TRUTH_OR_EXPR
5295 && lcode == NE_EXPR && integer_zerop (lr_arg)
5296 && rcode == NE_EXPR && integer_zerop (rr_arg)
5297 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5298 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5299 return build2_loc (loc, NE_EXPR, truth_type,
5300 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5301 ll_arg, rl_arg),
5302 build_int_cst (TREE_TYPE (ll_arg), 0));
5304 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5305 if (code == TRUTH_AND_EXPR
5306 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5307 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5308 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5309 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5310 return build2_loc (loc, EQ_EXPR, truth_type,
5311 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5312 ll_arg, rl_arg),
5313 build_int_cst (TREE_TYPE (ll_arg), 0));
5316 /* See if the comparisons can be merged. Then get all the parameters for
5317 each side. */
5319 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5320 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5321 return 0;
5323 volatilep = 0;
5324 ll_inner = decode_field_reference (loc, ll_arg,
5325 &ll_bitsize, &ll_bitpos, &ll_mode,
5326 &ll_unsignedp, &volatilep, &ll_mask,
5327 &ll_and_mask);
5328 lr_inner = decode_field_reference (loc, lr_arg,
5329 &lr_bitsize, &lr_bitpos, &lr_mode,
5330 &lr_unsignedp, &volatilep, &lr_mask,
5331 &lr_and_mask);
5332 rl_inner = decode_field_reference (loc, rl_arg,
5333 &rl_bitsize, &rl_bitpos, &rl_mode,
5334 &rl_unsignedp, &volatilep, &rl_mask,
5335 &rl_and_mask);
5336 rr_inner = decode_field_reference (loc, rr_arg,
5337 &rr_bitsize, &rr_bitpos, &rr_mode,
5338 &rr_unsignedp, &volatilep, &rr_mask,
5339 &rr_and_mask);
5341 /* The inner operation on the lhs of each comparison must be the
5342 same if we are to be able to do anything.
5343 Then see if we have constants. If not, the same must be true for
5344 the rhs's. */
5345 if (volatilep || ll_inner == 0 || rl_inner == 0
5346 || ! operand_equal_p (ll_inner, rl_inner, 0))
5347 return 0;
5349 if (TREE_CODE (lr_arg) == INTEGER_CST
5350 && TREE_CODE (rr_arg) == INTEGER_CST)
5351 l_const = lr_arg, r_const = rr_arg;
5352 else if (lr_inner == 0 || rr_inner == 0
5353 || ! operand_equal_p (lr_inner, rr_inner, 0))
5354 return 0;
5355 else
5356 l_const = r_const = 0;
5358 /* If either comparison code is not correct for our logical operation,
5359 fail. However, we can convert a one-bit comparison against zero into
5360 the opposite comparison against that bit being set in the field. */
5362 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5363 if (lcode != wanted_code)
5365 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5367 /* Make the left operand unsigned, since we are only interested
5368 in the value of one bit. Otherwise we are doing the wrong
5369 thing below. */
5370 ll_unsignedp = 1;
5371 l_const = ll_mask;
5373 else
5374 return 0;
5377 /* This is analogous to the code for l_const above. */
5378 if (rcode != wanted_code)
5380 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5382 rl_unsignedp = 1;
5383 r_const = rl_mask;
5385 else
5386 return 0;
5389 /* See if we can find a mode that contains both fields being compared on
5390 the left. If we can't, fail. Otherwise, update all constants and masks
5391 to be relative to a field of that size. */
5392 first_bit = MIN (ll_bitpos, rl_bitpos);
5393 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5394 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5395 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5396 volatilep);
5397 if (lnmode == VOIDmode)
5398 return 0;
5400 lnbitsize = GET_MODE_BITSIZE (lnmode);
5401 lnbitpos = first_bit & ~ (lnbitsize - 1);
5402 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5403 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5405 if (BYTES_BIG_ENDIAN)
5407 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5408 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5411 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5412 size_int (xll_bitpos));
5413 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5414 size_int (xrl_bitpos));
5416 if (l_const)
5418 l_const = fold_convert_loc (loc, lntype, l_const);
5419 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5420 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5421 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5422 fold_build1_loc (loc, BIT_NOT_EXPR,
5423 lntype, ll_mask))))
5425 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5427 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5430 if (r_const)
5432 r_const = fold_convert_loc (loc, lntype, r_const);
5433 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5434 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5435 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5436 fold_build1_loc (loc, BIT_NOT_EXPR,
5437 lntype, rl_mask))))
5439 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5441 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5445 /* If the right sides are not constant, do the same for them. Also,
5446 disallow this optimization if a size or signedness mismatch occurs
5447 between the left and right sides. */
5448 if (l_const == 0)
5450 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5451 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5452 /* Make sure the two fields on the right
5453 correspond to the left without being swapped. */
5454 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5455 return 0;
5457 first_bit = MIN (lr_bitpos, rr_bitpos);
5458 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5459 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5460 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5461 volatilep);
5462 if (rnmode == VOIDmode)
5463 return 0;
5465 rnbitsize = GET_MODE_BITSIZE (rnmode);
5466 rnbitpos = first_bit & ~ (rnbitsize - 1);
5467 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5468 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5470 if (BYTES_BIG_ENDIAN)
5472 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5473 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5476 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5477 rntype, lr_mask),
5478 size_int (xlr_bitpos));
5479 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5480 rntype, rr_mask),
5481 size_int (xrr_bitpos));
5483 /* Make a mask that corresponds to both fields being compared.
5484 Do this for both items being compared. If the operands are the
5485 same size and the bits being compared are in the same position
5486 then we can do this by masking both and comparing the masked
5487 results. */
5488 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5489 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5490 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5492 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5493 ll_unsignedp || rl_unsignedp);
5494 if (! all_ones_mask_p (ll_mask, lnbitsize))
5495 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5497 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5498 lr_unsignedp || rr_unsignedp);
5499 if (! all_ones_mask_p (lr_mask, rnbitsize))
5500 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5502 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5505 /* There is still another way we can do something: If both pairs of
5506 fields being compared are adjacent, we may be able to make a wider
5507 field containing them both.
5509 Note that we still must mask the lhs/rhs expressions. Furthermore,
5510 the mask must be shifted to account for the shift done by
5511 make_bit_field_ref. */
5512 if ((ll_bitsize + ll_bitpos == rl_bitpos
5513 && lr_bitsize + lr_bitpos == rr_bitpos)
5514 || (ll_bitpos == rl_bitpos + rl_bitsize
5515 && lr_bitpos == rr_bitpos + rr_bitsize))
5517 tree type;
5519 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5520 ll_bitsize + rl_bitsize,
5521 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5522 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5523 lr_bitsize + rr_bitsize,
5524 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5526 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5527 size_int (MIN (xll_bitpos, xrl_bitpos)));
5528 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5529 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5531 /* Convert to the smaller type before masking out unwanted bits. */
5532 type = lntype;
5533 if (lntype != rntype)
5535 if (lnbitsize > rnbitsize)
5537 lhs = fold_convert_loc (loc, rntype, lhs);
5538 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5539 type = rntype;
5541 else if (lnbitsize < rnbitsize)
5543 rhs = fold_convert_loc (loc, lntype, rhs);
5544 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5545 type = lntype;
5549 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5550 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5552 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5553 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5555 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5558 return 0;
5561 /* Handle the case of comparisons with constants. If there is something in
5562 common between the masks, those bits of the constants must be the same.
5563 If not, the condition is always false. Test for this to avoid generating
5564 incorrect code below. */
5565 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5566 if (! integer_zerop (result)
5567 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5568 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5570 if (wanted_code == NE_EXPR)
5572 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5573 return constant_boolean_node (true, truth_type);
5575 else
5577 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5578 return constant_boolean_node (false, truth_type);
5582 /* Construct the expression we will return. First get the component
5583 reference we will make. Unless the mask is all ones the width of
5584 that field, perform the mask operation. Then compare with the
5585 merged constant. */
5586 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5587 ll_unsignedp || rl_unsignedp);
5589 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5590 if (! all_ones_mask_p (ll_mask, lnbitsize))
5591 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5593 return build2_loc (loc, wanted_code, truth_type, result,
5594 const_binop (BIT_IOR_EXPR, l_const, r_const));
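/* Editorial sketch, not part of fold-const.c: the merge performed above,
   shown on explicit masks.  Two equality tests against disjoint masks of
   the same word combine into one test whose mask and constant are the
   IORs of the originals.  The helper name is hypothetical.  */

#include <assert.h>

static void
check_merged_mask_compare (unsigned x)
{
  /* (x & 0x0f) == 0x01 && (x & 0xf0) == 0x20 merges into
     (x & 0xff) == 0x21, because the masks do not overlap.  */
  int naive = (x & 0x0fu) == 0x01u && (x & 0xf0u) == 0x20u;
  int merged = (x & 0xffu) == 0x21u;
  assert (naive == merged);
}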
5597 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5598 constant. */
5600 static tree
5601 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5602 tree op0, tree op1)
5604 tree arg0 = op0;
5605 enum tree_code op_code;
5606 tree comp_const;
5607 tree minmax_const;
5608 int consts_equal, consts_lt;
5609 tree inner;
5611 STRIP_SIGN_NOPS (arg0);
5613 op_code = TREE_CODE (arg0);
5614 minmax_const = TREE_OPERAND (arg0, 1);
5615 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5616 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5617 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5618 inner = TREE_OPERAND (arg0, 0);
5620 /* If something does not permit us to optimize, return the original tree. */
5621 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5622 || TREE_CODE (comp_const) != INTEGER_CST
5623 || TREE_OVERFLOW (comp_const)
5624 || TREE_CODE (minmax_const) != INTEGER_CST
5625 || TREE_OVERFLOW (minmax_const))
5626 return NULL_TREE;
5628 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5629 and GT_EXPR, doing the rest with recursive calls using logical
5630 simplifications. */
5631 switch (code)
5633 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5635 tree tem
5636 = optimize_minmax_comparison (loc,
5637 invert_tree_comparison (code, false),
5638 type, op0, op1);
5639 if (tem)
5640 return invert_truthvalue_loc (loc, tem);
5641 return NULL_TREE;
5644 case GE_EXPR:
5645 return
5646 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5647 optimize_minmax_comparison
5648 (loc, EQ_EXPR, type, arg0, comp_const),
5649 optimize_minmax_comparison
5650 (loc, GT_EXPR, type, arg0, comp_const));
5652 case EQ_EXPR:
5653 if (op_code == MAX_EXPR && consts_equal)
5654 /* MAX (X, 0) == 0 -> X <= 0 */
5655 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5657 else if (op_code == MAX_EXPR && consts_lt)
5658 /* MAX (X, 0) == 5 -> X == 5 */
5659 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5661 else if (op_code == MAX_EXPR)
5662 /* MAX (X, 0) == -1 -> false */
5663 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5665 else if (consts_equal)
5666 /* MIN (X, 0) == 0 -> X >= 0 */
5667 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5669 else if (consts_lt)
5670 /* MIN (X, 0) == 5 -> false */
5671 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5673 else
5674 /* MIN (X, 0) == -1 -> X == -1 */
5675 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5677 case GT_EXPR:
5678 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5679 /* MAX (X, 0) > 0 -> X > 0
5680 MAX (X, 0) > 5 -> X > 5 */
5681 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5683 else if (op_code == MAX_EXPR)
5684 /* MAX (X, 0) > -1 -> true */
5685 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5687 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5688 /* MIN (X, 0) > 0 -> false
5689 MIN (X, 0) > 5 -> false */
5690 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5692 else
5693 /* MIN (X, 0) > -1 -> X > -1 */
5694 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5696 default:
5697 return NULL_TREE;
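/* Editorial sketch, not part of fold-const.c: the EQ_EXPR and GT_EXPR
   rewrites above checked on plain ints, with MAX (X, 0) spelled out via
   the conditional operator.  The helper name is hypothetical.  */

#include <assert.h>

static void
check_minmax_compare (int x)
{
  int max0 = x > 0 ? x : 0;           /* MAX (X, 0) */
  assert ((max0 == 0) == (x <= 0));   /* MAX (X, 0) == 0  ->  X <= 0 */
  assert ((max0 == 5) == (x == 5));   /* MAX (X, 0) == 5  ->  X == 5 */
  assert (max0 > -1);                 /* MAX (X, 0) > -1  ->  true   */
  assert ((max0 > 5) == (x > 5));     /* MAX (X, 0) > 5   ->  X > 5  */
}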
5701 /* T is an integer expression that is being multiplied, divided, or taken a
5702 modulus (CODE says which and what kind of divide or modulus) by a
5703 constant C. See if we can eliminate that operation by folding it with
5704 other operations already in T. WIDE_TYPE, if non-null, is a type that
5705 should be used for the computation if wider than our type.
5707 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5708 (X * 2) + (Y * 4). We must, however, be assured that either the original
5709 expression would not overflow or that overflow is undefined for the type
5710 in the language in question.
5712 If we return a non-null expression, it is an equivalent form of the
5713 original computation, but need not be in the original type.
5715 We set *STRICT_OVERFLOW_P to true if the return value depends on
5716 signed overflow being undefined. Otherwise we do not change
5717 *STRICT_OVERFLOW_P. */
5719 static tree
5720 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5721 bool *strict_overflow_p)
5723 /* To avoid exponential search depth, refuse to allow recursion past
5724 three levels. Beyond that (1) it's highly unlikely that we'll find
5725 something interesting and (2) we've probably processed it before
5726 when we built the inner expression. */
5728 static int depth;
5729 tree ret;
5731 if (depth > 3)
5732 return NULL;
5734 depth++;
5735 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5736 depth--;
5738 return ret;
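/* Editorial sketch, not part of fold-const.c: the worked example from the
   comment above, as a directly executable identity.  It holds whenever the
   original expression does not overflow, which is exactly the condition
   extract_muldiv must establish.  The helper name is hypothetical.  */

#include <assert.h>

static void
check_extract_muldiv (int x, int y)
{
  /* Dividing (X * 8) + (Y * 16) by 4 yields (X * 2) + (Y * 4).  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
}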
5741 static tree
5742 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5743 bool *strict_overflow_p)
5745 tree type = TREE_TYPE (t);
5746 enum tree_code tcode = TREE_CODE (t);
5747 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5748 > GET_MODE_SIZE (TYPE_MODE (type)))
5749 ? wide_type : type);
5750 tree t1, t2;
5751 int same_p = tcode == code;
5752 tree op0 = NULL_TREE, op1 = NULL_TREE;
5753 bool sub_strict_overflow_p;
5755 /* Don't deal with constants of zero here; they confuse the code below. */
5756 if (integer_zerop (c))
5757 return NULL_TREE;
5759 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5760 op0 = TREE_OPERAND (t, 0);
5762 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5763 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5765 /* Note that we need not handle conditional operations here since fold
5766 already handles those cases. So just do arithmetic here. */
5767 switch (tcode)
5769 case INTEGER_CST:
5770 /* For a constant, we can always simplify if we are a multiply
5771 or (for divide and modulus) if it is a multiple of our constant. */
5772 if (code == MULT_EXPR
5773 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5774 return const_binop (code, fold_convert (ctype, t),
5775 fold_convert (ctype, c));
5776 break;
5778 CASE_CONVERT: case NON_LVALUE_EXPR:
5779 /* If op0 is an expression ... */
5780 if ((COMPARISON_CLASS_P (op0)
5781 || UNARY_CLASS_P (op0)
5782 || BINARY_CLASS_P (op0)
5783 || VL_EXP_CLASS_P (op0)
5784 || EXPRESSION_CLASS_P (op0))
5785 /* ... and has wrapping overflow, and its type is smaller
5786 than ctype, then we cannot pass through as widening. */
5787 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5788 && (TYPE_PRECISION (ctype)
5789 > TYPE_PRECISION (TREE_TYPE (op0))))
5790 /* ... or this is a truncation (t is narrower than op0),
5791 then we cannot pass through this narrowing. */
5792 || (TYPE_PRECISION (type)
5793 < TYPE_PRECISION (TREE_TYPE (op0)))
5794 /* ... or signedness changes for division or modulus,
5795 then we cannot pass through this conversion. */
5796 || (code != MULT_EXPR
5797 && (TYPE_UNSIGNED (ctype)
5798 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5799 /* ... or has undefined overflow while the converted to
5800 type has not, we cannot do the operation in the inner type
5801 as that would introduce undefined overflow. */
5802 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5803 && !TYPE_OVERFLOW_UNDEFINED (type))))
5804 break;
5806 /* Pass the constant down and see if we can make a simplification. If
5807 we can, replace this expression with the inner simplification for
5808 possible later conversion to our or some other type. */
5809 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5810 && TREE_CODE (t2) == INTEGER_CST
5811 && !TREE_OVERFLOW (t2)
5812 && (0 != (t1 = extract_muldiv (op0, t2, code,
5813 code == MULT_EXPR
5814 ? ctype : NULL_TREE,
5815 strict_overflow_p))))
5816 return t1;
5817 break;
5819 case ABS_EXPR:
5820 /* If widening the type changes it from signed to unsigned, then we
5821 must avoid building ABS_EXPR itself as unsigned. */
5822 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5824 tree cstype = (*signed_type_for) (ctype);
5825 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5826 != 0)
5828 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5829 return fold_convert (ctype, t1);
5831 break;
5833 /* If the constant is negative, we cannot simplify this. */
5834 if (tree_int_cst_sgn (c) == -1)
5835 break;
5836 /* FALLTHROUGH */
5837 case NEGATE_EXPR:
5838 /* For division and modulus, type can't be unsigned, as e.g.
5839 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5840 For signed types, even with wrapping overflow, this is fine. */
5841 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5842 break;
5843 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5844 != 0)
5845 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5846 break;
5848 case MIN_EXPR: case MAX_EXPR:
5849 /* If widening the type changes the signedness, then we can't perform
5850 this optimization as that changes the result. */
5851 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5852 break;
5854 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5855 sub_strict_overflow_p = false;
5856 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5857 &sub_strict_overflow_p)) != 0
5858 && (t2 = extract_muldiv (op1, c, code, wide_type,
5859 &sub_strict_overflow_p)) != 0)
5861 if (tree_int_cst_sgn (c) < 0)
5862 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5863 if (sub_strict_overflow_p)
5864 *strict_overflow_p = true;
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5866 fold_convert (ctype, t2));
5868 break;
5870 case LSHIFT_EXPR: case RSHIFT_EXPR:
5871 /* If the second operand is constant, this is a multiplication
5872 or floor division by a power of two, so we can treat it that
5873 way unless the multiplier or divisor overflows. Signed
5874 left-shift overflow is implementation-defined rather than
5875 undefined in C90, so do not convert signed left shift into
5876 multiplication. */
5877 if (TREE_CODE (op1) == INTEGER_CST
5878 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5879 /* const_binop may not detect overflow correctly,
5880 so check for it explicitly here. */
5881 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5882 && TREE_INT_CST_HIGH (op1) == 0
5883 && 0 != (t1 = fold_convert (ctype,
5884 const_binop (LSHIFT_EXPR,
5885 size_one_node,
5886 op1)))
5887 && !TREE_OVERFLOW (t1))
5888 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5889 ? MULT_EXPR : FLOOR_DIV_EXPR,
5890 ctype,
5891 fold_convert (ctype, op0),
5892 t1),
5893 c, code, wide_type, strict_overflow_p);
5894 break;
5896 case PLUS_EXPR: case MINUS_EXPR:
5897 /* See if we can eliminate the operation on both sides. If we can, we
5898 can return a new PLUS or MINUS. If we can't, the only remaining
5899 cases where we can do anything are if the second operand is a
5900 constant. */
5901 sub_strict_overflow_p = false;
5902 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5903 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5904 if (t1 != 0 && t2 != 0
5905 && (code == MULT_EXPR
5906 /* If not multiplication, we can only do this if both operands
5907 are divisible by c. */
5908 || (multiple_of_p (ctype, op0, c)
5909 && multiple_of_p (ctype, op1, c))))
5911 if (sub_strict_overflow_p)
5912 *strict_overflow_p = true;
5913 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5914 fold_convert (ctype, t2));
5917 /* If this was a subtraction, negate OP1 and set it to be an addition.
5918 This simplifies the logic below. */
5919 if (tcode == MINUS_EXPR)
5921 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5922 /* If OP1 was not easily negatable, the constant may be OP0. */
5923 if (TREE_CODE (op0) == INTEGER_CST)
5925 tree tem = op0;
5926 op0 = op1;
5927 op1 = tem;
5928 tem = t1;
5929 t1 = t2;
5930 t2 = tem;
5934 if (TREE_CODE (op1) != INTEGER_CST)
5935 break;
5937 /* If either OP1 or C is negative, this optimization is not safe for
5938 some of the division and remainder types while for others we need
5939 to change the code. */
5940 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5942 if (code == CEIL_DIV_EXPR)
5943 code = FLOOR_DIV_EXPR;
5944 else if (code == FLOOR_DIV_EXPR)
5945 code = CEIL_DIV_EXPR;
5946 else if (code != MULT_EXPR
5947 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5948 break;
5951 /* If it's a multiply or a division/modulus operation of a multiple
5952 of our constant, do the operation and verify it doesn't overflow. */
5953 if (code == MULT_EXPR
5954 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5956 op1 = const_binop (code, fold_convert (ctype, op1),
5957 fold_convert (ctype, c));
5958 /* We allow the constant to overflow with wrapping semantics. */
5959 if (op1 == 0
5960 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5961 break;
5963 else
5964 break;
5966 /* If we have an unsigned type, we cannot widen the operation since it
5967 will change the result if the original computation overflowed. */
5968 if (TYPE_UNSIGNED (ctype) && ctype != type)
5969 break;
5971 /* If we were able to eliminate our operation from the first side,
5972 apply our operation to the second side and reform the PLUS. */
5973 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5974 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5976 /* The last case is a multiplication. In that case, we can
5977 apply the distributive law to commute the multiply and addition
5978 if the multiplication of the constants doesn't overflow
5979 and overflow is defined. With undefined overflow
5980 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5981 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5982 return fold_build2 (tcode, ctype,
5983 fold_build2 (code, ctype,
5984 fold_convert (ctype, op0),
5985 fold_convert (ctype, c)),
5986 op1);
5988 break;
5990 case MULT_EXPR:
5991 /* We have a special case here if we are doing something like
5992 (C * 8) % 4 since we know that's zero. */
5993 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5994 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5995 /* If the multiplication can overflow we cannot optimize this. */
5996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5997 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5998 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6000 *strict_overflow_p = true;
6001 return omit_one_operand (type, integer_zero_node, op0);
6004 /* ... fall through ... */
6006 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6007 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6008 /* If we can extract our operation from the LHS, do so and return a
6009 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6010 do something only if the second operand is a constant. */
6011 if (same_p
6012 && (t1 = extract_muldiv (op0, c, code, wide_type,
6013 strict_overflow_p)) != 0)
6014 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6015 fold_convert (ctype, op1));
6016 else if (tcode == MULT_EXPR && code == MULT_EXPR
6017 && (t1 = extract_muldiv (op1, c, code, wide_type,
6018 strict_overflow_p)) != 0)
6019 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6020 fold_convert (ctype, t1));
6021 else if (TREE_CODE (op1) != INTEGER_CST)
6022 return 0;
6024 /* If these are the same operation types, we can associate them
6025 assuming no overflow. */
6026 if (tcode == code)
6028 double_int mul;
6029 bool overflow_p;
6030 unsigned prec = TYPE_PRECISION (ctype);
6031 bool uns = TYPE_UNSIGNED (ctype);
6032 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6033 double_int dic = tree_to_double_int (c).ext (prec, uns);
6034 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6035 overflow_p = ((!uns && overflow_p)
6036 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6037 if (!double_int_fits_to_tree_p (ctype, mul)
6038 && ((uns && tcode != MULT_EXPR) || !uns))
6039 overflow_p = 1;
6040 if (!overflow_p)
6041 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6042 double_int_to_tree (ctype, mul));
6045 /* If these operations "cancel" each other, we have the main
6046 optimizations of this pass, which occur when either constant is a
6047 multiple of the other, in which case we replace this with an
6048 operation of either CODE or TCODE.
6050 If we have an unsigned type, we cannot do this since it will change
6051 the result if the original computation overflowed. */
6052 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6053 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6054 || (tcode == MULT_EXPR
6055 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6056 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6057 && code != MULT_EXPR)))
6059 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6061 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6062 *strict_overflow_p = true;
6063 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6064 fold_convert (ctype,
6065 const_binop (TRUNC_DIV_EXPR,
6066 op1, c)));
6068 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6070 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6071 *strict_overflow_p = true;
6072 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6073 fold_convert (ctype,
6074 const_binop (TRUNC_DIV_EXPR,
6075 c, op1)));
6078 break;
6080 default:
6081 break;
6084 return 0;
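/* Editorial sketch, not part of fold-const.c: the "cancelling" case above,
   where one constant exactly divides the other.  Valid only when the
   intermediate products cannot wrap, which is why the fold requires
   undefined (hence assumed absent) signed overflow.  */

#include <assert.h>

static void
check_muldiv_cancel (int x)
{
  assert ((x * 12) / 4 == x * 3);   /* (X * 12) / 4  ->  X * (12 / 4) */
  assert ((x * 3) * 4 == x * 12);   /* (X * 3) * 4   ->  X * (3 * 4)  */
}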
6087 /* Return a node which has the indicated constant VALUE (either 0 or
6088 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6089 and is of the indicated TYPE. */
6091 tree
6092 constant_boolean_node (bool value, tree type)
6094 if (type == integer_type_node)
6095 return value ? integer_one_node : integer_zero_node;
6096 else if (type == boolean_type_node)
6097 return value ? boolean_true_node : boolean_false_node;
6098 else if (TREE_CODE (type) == VECTOR_TYPE)
6099 return build_vector_from_val (type,
6100 build_int_cst (TREE_TYPE (type),
6101 value ? -1 : 0));
6102 else
6103 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6107 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6108 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6109 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6110 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6111 COND is the first argument to CODE; otherwise (as in the example
6112 given here), it is the second argument. TYPE is the type of the
6113 original expression. Return NULL_TREE if no simplification is
6114 possible. */
6116 static tree
6117 fold_binary_op_with_conditional_arg (location_t loc,
6118 enum tree_code code,
6119 tree type, tree op0, tree op1,
6120 tree cond, tree arg, int cond_first_p)
6122 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6123 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6124 tree test, true_value, false_value;
6125 tree lhs = NULL_TREE;
6126 tree rhs = NULL_TREE;
6127 enum tree_code cond_code = COND_EXPR;
6129 if (TREE_CODE (cond) == COND_EXPR
6130 || TREE_CODE (cond) == VEC_COND_EXPR)
6132 test = TREE_OPERAND (cond, 0);
6133 true_value = TREE_OPERAND (cond, 1);
6134 false_value = TREE_OPERAND (cond, 2);
6135 /* If this arm of the conditional throws (and hence has void type),
6136 then it does not make sense to try to perform a logical or
6137 arithmetic operation involving it. */
6138 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6139 lhs = true_value;
6140 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6141 rhs = false_value;
6143 else
6145 tree testtype = TREE_TYPE (cond);
6146 test = cond;
6147 true_value = constant_boolean_node (true, testtype);
6148 false_value = constant_boolean_node (false, testtype);
6151 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6152 cond_code = VEC_COND_EXPR;
6154 /* This transformation is only worthwhile if we don't have to wrap ARG
6155 in a SAVE_EXPR and the operation can be simplified without recursing
6156 on at least one of the branches once it is pushed inside the COND_EXPR. */
6157 if (!TREE_CONSTANT (arg)
6158 && (TREE_SIDE_EFFECTS (arg)
6159 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6160 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6161 return NULL_TREE;
6163 arg = fold_convert_loc (loc, arg_type, arg);
6164 if (lhs == 0)
6166 true_value = fold_convert_loc (loc, cond_type, true_value);
6167 if (cond_first_p)
6168 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6169 else
6170 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6172 if (rhs == 0)
6174 false_value = fold_convert_loc (loc, cond_type, false_value);
6175 if (cond_first_p)
6176 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6177 else
6178 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6181 /* Check that we have simplified at least one of the branches. */
6182 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6183 return NULL_TREE;
6185 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
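/* Editorial sketch, not part of fold-const.c: the distribution described
   before fold_binary_op_with_conditional_arg, checked on ints.  Either
   spelling evaluates exactly one of the two sums.  The helper name is
   hypothetical.  */

#include <assert.h>

static void
check_cond_arg_distribution (int a, int b, int x, int y)
{
  /* a + (b ? x : y)  ->  b ? (a + x) : (a + y)  */
  assert (a + (b ? x : y) == (b ? a + x : a + y));
}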
6189 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6191 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6192 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6193 ADDEND is the same as X.
6195 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6196 and finite. The problematic cases are when X is zero, and its mode
6197 has signed zeros. In the case of rounding towards -infinity,
6198 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6199 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6201 bool
6202 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6204 if (!real_zerop (addend))
6205 return false;
6207 /* Don't allow the fold with -fsignaling-nans. */
6208 if (HONOR_SNANS (TYPE_MODE (type)))
6209 return false;
6211 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6212 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6213 return true;
6215 /* In a vector or complex, we would need to check the sign of all zeros. */
6216 if (TREE_CODE (addend) != REAL_CST)
6217 return false;
6219 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6220 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6221 negate = !negate;
6223 /* The mode has signed zeros, and we have to honor their sign.
6224 In this situation, there is only one case we can return true for.
6225 X - 0 is the same as X unless rounding towards -infinity is
6226 supported. */
6227 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
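/* Editorial sketch, not part of fold-const.c: why X + 0.0 is not an
   identity once signed zeros matter, under the default round-to-nearest
   mode.  Assumes IEEE 754 semantics; the helper name is hypothetical.  */

#include <assert.h>
#include <math.h>

static void
check_signed_zero_addition (void)
{
  double minus_zero = -0.0;
  /* -0.0 + 0.0 is +0.0, so the addition changes the sign of zero...  */
  assert (!signbit (minus_zero + 0.0));
  /* ...while -0.0 - 0.0 stays -0.0, matching the comment above.  */
  assert (signbit (minus_zero - 0.0));
}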
6230 /* Subroutine of fold() that checks comparisons of built-in math
6231 functions against real constants.
6233 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6234 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6235 is the type of the result and ARG0 and ARG1 are the operands of the
6236 comparison. ARG1 must be a TREE_REAL_CST.
6238 The function returns the constant folded tree if a simplification
6239 can be made, and NULL_TREE otherwise. */
6241 static tree
6242 fold_mathfn_compare (location_t loc,
6243 enum built_in_function fcode, enum tree_code code,
6244 tree type, tree arg0, tree arg1)
6246 REAL_VALUE_TYPE c;
6248 if (BUILTIN_SQRT_P (fcode))
6250 tree arg = CALL_EXPR_ARG (arg0, 0);
6251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6253 c = TREE_REAL_CST (arg1);
6254 if (REAL_VALUE_NEGATIVE (c))
6256 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6257 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6258 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6260 /* sqrt(x) > y is always true, if y is negative and we
6261 don't care about NaNs, i.e. negative values of x. */
6262 if (code == NE_EXPR || !HONOR_NANS (mode))
6263 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6265 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6266 return fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg), dconst0));
6269 else if (code == GT_EXPR || code == GE_EXPR)
6271 REAL_VALUE_TYPE c2;
6273 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6274 real_convert (&c2, mode, &c2);
6276 if (REAL_VALUE_ISINF (c2))
6278 /* sqrt(x) > y is x == +Inf, when y is very large. */
6279 if (HONOR_INFINITIES (mode))
6280 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6281 build_real (TREE_TYPE (arg), c2));
6283 /* sqrt(x) > y is always false, when y is very large
6284 and we don't care about infinities. */
6285 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6288 /* sqrt(x) > c is the same as x > c*c. */
6289 return fold_build2_loc (loc, code, type, arg,
6290 build_real (TREE_TYPE (arg), c2));
6292 else if (code == LT_EXPR || code == LE_EXPR)
6294 REAL_VALUE_TYPE c2;
6296 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6297 real_convert (&c2, mode, &c2);
6299 if (REAL_VALUE_ISINF (c2))
6301 /* sqrt(x) < y is always true, when y is a very large
6302 value and we don't care about NaNs or Infinities. */
6303 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6304 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6306 /* sqrt(x) < y is x != +Inf when y is very large and we
6307 don't care about NaNs. */
6308 if (! HONOR_NANS (mode))
6309 return fold_build2_loc (loc, NE_EXPR, type, arg,
6310 build_real (TREE_TYPE (arg), c2));
6312 /* sqrt(x) < y is x >= 0 when y is very large and we
6313 don't care about Infinities. */
6314 if (! HONOR_INFINITIES (mode))
6315 return fold_build2_loc (loc, GE_EXPR, type, arg,
6316 build_real (TREE_TYPE (arg), dconst0));
6318 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6319 arg = save_expr (arg);
6320 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6321 fold_build2_loc (loc, GE_EXPR, type, arg,
6322 build_real (TREE_TYPE (arg),
6323 dconst0)),
6324 fold_build2_loc (loc, NE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg),
6326 c2)));
6329 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6330 if (! HONOR_NANS (mode))
6331 return fold_build2_loc (loc, code, type, arg,
6332 build_real (TREE_TYPE (arg), c2));
6334 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6335 arg = save_expr (arg);
6336 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6337 fold_build2_loc (loc, GE_EXPR, type, arg,
6338 build_real (TREE_TYPE (arg),
6339 dconst0)),
6340 fold_build2_loc (loc, code, type, arg,
6341 build_real (TREE_TYPE (arg),
6342 c2)));
6346 return NULL_TREE;
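/* Editorial sketch, not part of fold-const.c: the exactly-safe piece of
   the sqrt folds above, the negative-bound case.  (The caller guards the
   c*c rewrites behind unsafe-math flags, since squaring rounds.)  The
   helper name is hypothetical.  */

#include <assert.h>
#include <math.h>

static void
check_sqrt_negative_bound (double x)
{
  /* For y negative and x not NaN, sqrt(x) > y reduces to x >= 0:
     sqrt of a negative argument is NaN and compares false.  */
  if (!isnan (x))
    assert ((sqrt (x) > -1.0) == (x >= 0.0));
}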
6349 /* Subroutine of fold() that optimizes comparisons against Infinities,
6350 either +Inf or -Inf.
6352 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6353 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6354 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6356 The function returns the constant folded tree if a simplification
6357 can be made, and NULL_TREE otherwise. */
6359 static tree
6360 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6361 tree arg0, tree arg1)
6363 enum machine_mode mode;
6364 REAL_VALUE_TYPE max;
6365 tree temp;
6366 bool neg;
6368 mode = TYPE_MODE (TREE_TYPE (arg0));
6370 /* For negative infinity swap the sense of the comparison. */
6371 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6372 if (neg)
6373 code = swap_tree_comparison (code);
6375 switch (code)
6377 case GT_EXPR:
6378 /* x > +Inf is always false, if we ignore sNaNs. */
6379 if (HONOR_SNANS (mode))
6380 return NULL_TREE;
6381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6383 case LE_EXPR:
6384 /* x <= +Inf is always true, if we don't care about NaNs. */
6385 if (! HONOR_NANS (mode))
6386 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6388 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6389 arg0 = save_expr (arg0);
6390 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6392 case EQ_EXPR:
6393 case GE_EXPR:
6394 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6395 real_maxval (&max, neg, mode);
6396 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6397 arg0, build_real (TREE_TYPE (arg0), max));
6399 case LT_EXPR:
6400 /* x < +Inf is always equal to x <= DBL_MAX. */
6401 real_maxval (&max, neg, mode);
6402 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6403 arg0, build_real (TREE_TYPE (arg0), max));
6405 case NE_EXPR:
6406 /* x != +Inf is always equal to !(x > DBL_MAX). */
6407 real_maxval (&max, neg, mode);
6408 if (! HONOR_NANS (mode))
6409 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6410 arg0, build_real (TREE_TYPE (arg0), max));
6412 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6413 arg0, build_real (TREE_TYPE (arg0), max));
6414 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6416 default:
6417 break;
6420 return NULL_TREE;
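/* Editorial sketch, not part of fold-const.c: the Inf rewrites above on
   doubles.  INFINITY and DBL_MAX are the C99 spellings of +Inf and the
   largest finite value; the helper name is hypothetical.  */

#include <assert.h>
#include <float.h>
#include <math.h>

static void
check_inf_compare (double x)
{
  if (!isnan (x))
    {
      assert ((x < INFINITY) == (x <= DBL_MAX));   /* LT_EXPR case */
      assert ((x >= INFINITY) == (x > DBL_MAX));   /* GE_EXPR case */
    }
}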
6423 /* Subroutine of fold() that optimizes comparisons of a division by
6424 a nonzero integer constant against an integer constant, i.e.
6425 X/C1 op C2.
6427 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6428 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6429 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6431 The function returns the constant folded tree if a simplification
6432 can be made, and NULL_TREE otherwise. */
6434 static tree
6435 fold_div_compare (location_t loc,
6436 enum tree_code code, tree type, tree arg0, tree arg1)
6438 tree prod, tmp, hi, lo;
6439 tree arg00 = TREE_OPERAND (arg0, 0);
6440 tree arg01 = TREE_OPERAND (arg0, 1);
6441 double_int val;
6442 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6443 bool neg_overflow;
6444 bool overflow;
6446 /* We have to do this the hard way to detect unsigned overflow.
6447 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6448 val = TREE_INT_CST (arg01)
6449 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6450 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6451 neg_overflow = false;
6453 if (unsigned_p)
6455 tmp = int_const_binop (MINUS_EXPR, arg01,
6456 build_int_cst (TREE_TYPE (arg01), 1));
6457 lo = prod;
6459 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6460 val = TREE_INT_CST (prod)
6461 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6462 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6463 -1, overflow | TREE_OVERFLOW (prod));
6465 else if (tree_int_cst_sgn (arg01) >= 0)
6467 tmp = int_const_binop (MINUS_EXPR, arg01,
6468 build_int_cst (TREE_TYPE (arg01), 1));
6469 switch (tree_int_cst_sgn (arg1))
6471 case -1:
6472 neg_overflow = true;
6473 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6474 hi = prod;
6475 break;
6477 case 0:
6478 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6479 hi = tmp;
6480 break;
6482 case 1:
6483 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6484 lo = prod;
6485 break;
6487 default:
6488 gcc_unreachable ();
6491 else
6493 /* A negative divisor reverses the relational operators. */
6494 code = swap_tree_comparison (code);
6496 tmp = int_const_binop (PLUS_EXPR, arg01,
6497 build_int_cst (TREE_TYPE (arg01), 1));
6498 switch (tree_int_cst_sgn (arg1))
6500 case -1:
6501 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6502 lo = prod;
6503 break;
6505 case 0:
6506 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6507 lo = tmp;
6508 break;
6510 case 1:
6511 neg_overflow = true;
6512 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6513 hi = prod;
6514 break;
6516 default:
6517 gcc_unreachable ();
6521 switch (code)
6523 case EQ_EXPR:
6524 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6525 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6526 if (TREE_OVERFLOW (hi))
6527 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6528 if (TREE_OVERFLOW (lo))
6529 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6530 return build_range_check (loc, type, arg00, 1, lo, hi);
6532 case NE_EXPR:
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6539 return build_range_check (loc, type, arg00, 0, lo, hi);
6541 case LT_EXPR:
6542 if (TREE_OVERFLOW (lo))
6544 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6545 return omit_one_operand_loc (loc, type, tmp, arg00);
6547 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6549 case LE_EXPR:
6550 if (TREE_OVERFLOW (hi))
6552 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6553 return omit_one_operand_loc (loc, type, tmp, arg00);
6555 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6557 case GT_EXPR:
6558 if (TREE_OVERFLOW (hi))
6560 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6561 return omit_one_operand_loc (loc, type, tmp, arg00);
6563 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6565 case GE_EXPR:
6566 if (TREE_OVERFLOW (lo))
6568 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6569 return omit_one_operand_loc (loc, type, tmp, arg00);
6571 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6573 default:
6574 break;
6577 return NULL_TREE;
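/* Editorial sketch, not part of fold-const.c: the division-vs-constant
   rewrite above expressed as a range check, using C99 truncating
   division.  The helper name is hypothetical.  */

#include <assert.h>

static void
check_div_compare (int x)
{
  /* X / 3 == 2 becomes the range check 6 <= X && X <= 8.  */
  assert ((x / 3 == 2) == (x >= 6 && x <= 8));
}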
6581 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6582 equality/inequality test, then return a simplified form of the test
6583 using a sign testing. Otherwise return NULL. TYPE is the desired
6584 result type. */
6586 static tree
6587 fold_single_bit_test_into_sign_test (location_t loc,
6588 enum tree_code code, tree arg0, tree arg1,
6589 tree result_type)
6591 /* If this is testing a single bit, we can optimize the test. */
6592 if ((code == NE_EXPR || code == EQ_EXPR)
6593 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6594 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6596 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6597 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6598 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6600 if (arg00 != NULL_TREE
6601 /* This is only a win if casting to a signed type is cheap,
6602 i.e. when arg00's type is not a partial mode. */
6603 && TYPE_PRECISION (TREE_TYPE (arg00))
6604 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6606 tree stype = signed_type_for (TREE_TYPE (arg00));
6607 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6608 result_type,
6609 fold_convert_loc (loc, stype, arg00),
6610 build_int_cst (stype, 0));
6614 return NULL_TREE;
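/* Editorial sketch, not part of fold-const.c: the sign-bit rewrite above.
   The cast's behavior for values with the top bit set is assumed to be
   the usual two's complement wrap-around; the helper name is
   hypothetical.  */

#include <assert.h>
#include <stdint.h>

static void
check_sign_bit_test (uint32_t a)
{
  /* (A & 0x80000000) != 0  ->  (int32_t) A < 0  */
  assert (((a & UINT32_C (0x80000000)) != 0) == ((int32_t) a < 0));
}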
6617 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6618 equality/inequality test, then return a simplified form of
6619 the test using shifts and logical operations. Otherwise return
6620 NULL. TYPE is the desired result type. */
6622 tree
6623 fold_single_bit_test (location_t loc, enum tree_code code,
6624 tree arg0, tree arg1, tree result_type)
6626 /* If this is testing a single bit, we can optimize the test. */
6627 if ((code == NE_EXPR || code == EQ_EXPR)
6628 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6629 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6631 tree inner = TREE_OPERAND (arg0, 0);
6632 tree type = TREE_TYPE (arg0);
6633 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6634 enum machine_mode operand_mode = TYPE_MODE (type);
6635 int ops_unsigned;
6636 tree signed_type, unsigned_type, intermediate_type;
6637 tree tem, one;
6639 /* First, see if we can fold the single bit test into a sign-bit
6640 test. */
6641 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6642 result_type);
6643 if (tem)
6644 return tem;
6646 /* Otherwise we have (A & C) != 0 where C is a single bit,
6647 convert that into ((A >> C2) & 1), where C2 = log2(C).
6648 Similarly for (A & C) == 0. */
6650 /* If INNER is a right shift of a constant and it plus BITNUM does
6651 not overflow, adjust BITNUM and INNER. */
6652 if (TREE_CODE (inner) == RSHIFT_EXPR
6653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6654 && host_integerp (TREE_OPERAND (inner, 1), 1)
6655 && bitnum < TYPE_PRECISION (type)
6656 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6657 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6659 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6660 inner = TREE_OPERAND (inner, 0);
6663 /* If we are going to be able to omit the AND below, we must do our
6664 operations as unsigned. If we must use the AND, we have a choice.
6665 Normally unsigned is faster, but for some machines signed is. */
6666 #ifdef LOAD_EXTEND_OP
6667 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6668 && !flag_syntax_only) ? 0 : 1;
6669 #else
6670 ops_unsigned = 1;
6671 #endif
6673 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6674 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6675 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6676 inner = fold_convert_loc (loc, intermediate_type, inner);
6678 if (bitnum != 0)
6679 inner = build2 (RSHIFT_EXPR, intermediate_type,
6680 inner, size_int (bitnum));
6682 one = build_int_cst (intermediate_type, 1);
6684 if (code == EQ_EXPR)
6685 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6687 /* Put the AND last so it can combine with more things. */
6688 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6690 /* Make sure to return the proper type. */
6691 inner = fold_convert_loc (loc, result_type, inner);
6693 return inner;
6695 return NULL_TREE;
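/* Editorial sketch, not part of fold-const.c: the shift form of a single
   bit test, with C = 8 and hence C2 = 3.  The helper name is
   hypothetical.  */

#include <assert.h>

static void
check_single_bit_test (unsigned a)
{
  assert (((a & 8u) != 0) == ((a >> 3) & 1u));          /* NE_EXPR form */
  assert (((a & 8u) == 0) == (((a >> 3) & 1u) ^ 1u));   /* EQ_EXPR form */
}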
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6701 static bool
6702 reorder_operands_p (const_tree arg0, const_tree arg1)
6704 if (! flag_evaluation_order)
6705 return true;
6706 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6707 return true;
6708 return ! TREE_SIDE_EFFECTS (arg0)
6709 && ! TREE_SIDE_EFFECTS (arg1);
6712 /* Test whether it is preferable to swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6717 bool
6718 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6720 STRIP_SIGN_NOPS (arg0);
6721 STRIP_SIGN_NOPS (arg1);
6723 if (TREE_CODE (arg1) == INTEGER_CST)
6724 return 0;
6725 if (TREE_CODE (arg0) == INTEGER_CST)
6726 return 1;
6728 if (TREE_CODE (arg1) == REAL_CST)
6729 return 0;
6730 if (TREE_CODE (arg0) == REAL_CST)
6731 return 1;
6733 if (TREE_CODE (arg1) == FIXED_CST)
6734 return 0;
6735 if (TREE_CODE (arg0) == FIXED_CST)
6736 return 1;
6738 if (TREE_CODE (arg1) == COMPLEX_CST)
6739 return 0;
6740 if (TREE_CODE (arg0) == COMPLEX_CST)
6741 return 1;
6743 if (TREE_CONSTANT (arg1))
6744 return 0;
6745 if (TREE_CONSTANT (arg0))
6746 return 1;
6748 if (optimize_function_for_size_p (cfun))
6749 return 0;
6751 if (reorder && flag_evaluation_order
6752 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6753 return 0;
6755 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6756 for commutative and comparison operators. Ensuring a canonical
6757 form allows the optimizers to find additional redundancies without
6758 having to explicitly check for both orderings. */
6759 if (TREE_CODE (arg0) == SSA_NAME
6760 && TREE_CODE (arg1) == SSA_NAME
6761 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6762 return 1;
6764 /* Put SSA_NAMEs last. */
6765 if (TREE_CODE (arg1) == SSA_NAME)
6766 return 0;
6767 if (TREE_CODE (arg0) == SSA_NAME)
6768 return 1;
6770 /* Put variables last. */
6771 if (DECL_P (arg1))
6772 return 0;
6773 if (DECL_P (arg0))
6774 return 1;
6776 return 0;
6779 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6780 ARG0 is extended to a wider type. */
6782 static tree
6783 fold_widened_comparison (location_t loc, enum tree_code code,
6784 tree type, tree arg0, tree arg1)
6786 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6787 tree arg1_unw;
6788 tree shorter_type, outer_type;
6789 tree min, max;
6790 bool above, below;
6792 if (arg0_unw == arg0)
6793 return NULL_TREE;
6794 shorter_type = TREE_TYPE (arg0_unw);
6796 #ifdef HAVE_canonicalize_funcptr_for_compare
6797 /* Disable this optimization if we're casting a function pointer
6798 type on targets that require function pointer canonicalization. */
6799 if (HAVE_canonicalize_funcptr_for_compare
6800 && TREE_CODE (shorter_type) == POINTER_TYPE
6801 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6802 return NULL_TREE;
6803 #endif
6805 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6806 return NULL_TREE;
6808 arg1_unw = get_unwidened (arg1, NULL_TREE);
6810 /* If possible, express the comparison in the shorter mode. */
6811 if ((code == EQ_EXPR || code == NE_EXPR
6812 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6813 && (TREE_TYPE (arg1_unw) == shorter_type
6814 || ((TYPE_PRECISION (shorter_type)
6815 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6816 && (TYPE_UNSIGNED (shorter_type)
6817 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6818 || (TREE_CODE (arg1_unw) == INTEGER_CST
6819 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6820 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6821 && int_fits_type_p (arg1_unw, shorter_type))))
6822 return fold_build2_loc (loc, code, type, arg0_unw,
6823 fold_convert_loc (loc, shorter_type, arg1_unw));
6825 if (TREE_CODE (arg1_unw) != INTEGER_CST
6826 || TREE_CODE (shorter_type) != INTEGER_TYPE
6827 || !int_fits_type_p (arg1_unw, shorter_type))
6828 return NULL_TREE;
6830 /* If we are comparing with an integer that does not fit into the range
6831 of the shorter type, the result is known. */
6832 outer_type = TREE_TYPE (arg1_unw);
6833 min = lower_bound_in_type (outer_type, shorter_type);
6834 max = upper_bound_in_type (outer_type, shorter_type);
6836 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6837 max, arg1_unw));
6838 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6839 arg1_unw, min));
6841 switch (code)
6843 case EQ_EXPR:
6844 if (above || below)
6845 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6846 break;
6848 case NE_EXPR:
6849 if (above || below)
6850 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6851 break;
6853 case LT_EXPR:
6854 case LE_EXPR:
6855 if (above)
6856 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6857 else if (below)
6858 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6860 case GT_EXPR:
6861 case GE_EXPR:
6862 if (above)
6863 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6864 else if (below)
6865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6867 default:
6868 break;
6871 return NULL_TREE;
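/* Editorial sketch, not part of fold-const.c: a widened comparison whose
   constant lies outside the narrow type's range, so the result is known
   without inspecting the value.  The helper name is hypothetical.  */

#include <assert.h>

static void
check_widened_comparison (signed char c)
{
  assert (((int) c == 1000) == 0);   /* 1000 cannot fit in signed char */
  assert (((int) c < 200) == 1);     /* every signed char is below 200 */
}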
6874 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6875 ARG0 just the signedness is changed. */
6877 static tree
6878 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6879 tree arg0, tree arg1)
6881 tree arg0_inner;
6882 tree inner_type, outer_type;
6884 if (!CONVERT_EXPR_P (arg0))
6885 return NULL_TREE;
6887 outer_type = TREE_TYPE (arg0);
6888 arg0_inner = TREE_OPERAND (arg0, 0);
6889 inner_type = TREE_TYPE (arg0_inner);
6891 #ifdef HAVE_canonicalize_funcptr_for_compare
6892 /* Disable this optimization if we're casting a function pointer
6893 type on targets that require function pointer canonicalization. */
6894 if (HAVE_canonicalize_funcptr_for_compare
6895 && TREE_CODE (inner_type) == POINTER_TYPE
6896 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6897 return NULL_TREE;
6898 #endif
6900 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6901 return NULL_TREE;
6903 if (TREE_CODE (arg1) != INTEGER_CST
6904 && !(CONVERT_EXPR_P (arg1)
6905 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6906 return NULL_TREE;
6908 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6909 && code != NE_EXPR
6910 && code != EQ_EXPR)
6911 return NULL_TREE;
6913 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6914 return NULL_TREE;
6916 if (TREE_CODE (arg1) == INTEGER_CST)
6917 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6918 0, TREE_OVERFLOW (arg1));
6919 else
6920 arg1 = fold_convert_loc (loc, inner_type, arg1);
6922 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6925 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6926 the step of the array. Reconstructs s and delta in the case of s *
6927 delta being an integer constant (and thus already folded). ADDR is
6928 the address. OP1 is the multiplicative expression. If the
6929 function succeeds, the new address expression is returned.
6930 Otherwise NULL_TREE is returned. LOC is the location of the
6931 resulting expression. */
6933 static tree
6934 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6936 tree s, delta, step;
6937 tree ref = TREE_OPERAND (addr, 0), pref;
6938 tree ret, pos;
6939 tree itype;
6940 bool mdim = false;
6942 /* Strip the nops that might be added when converting op1 to sizetype. */
6943 STRIP_NOPS (op1);
6945 /* Canonicalize op1 into a possibly non-constant delta
6946 and an INTEGER_CST s. */
6947 if (TREE_CODE (op1) == MULT_EXPR)
6949 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6951 STRIP_NOPS (arg0);
6952 STRIP_NOPS (arg1);
6954 if (TREE_CODE (arg0) == INTEGER_CST)
6956 s = arg0;
6957 delta = arg1;
6959 else if (TREE_CODE (arg1) == INTEGER_CST)
6961 s = arg1;
6962 delta = arg0;
6964 else
6965 return NULL_TREE;
6967 else if (TREE_CODE (op1) == INTEGER_CST)
6969 delta = op1;
6970 s = NULL_TREE;
6972 else
6974 /* Treat op1 as delta * 1. */
6975 delta = op1;
6976 s = integer_one_node;
6979 /* Handle &x.array the same as we would handle &x.array[0]. */
6980 if (TREE_CODE (ref) == COMPONENT_REF
6981 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6983 tree domain;
6985 /* Remember if this was a multi-dimensional array. */
6986 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6987 mdim = true;
6989 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6990 if (! domain)
6991 goto cont;
6992 itype = TREE_TYPE (domain);
6994 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6995 if (TREE_CODE (step) != INTEGER_CST)
6996 goto cont;
6998 if (s)
7000 if (! tree_int_cst_equal (step, s))
7001 goto cont;
7003 else
7005 /* See whether delta is a multiple of step. */
7006 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7007 if (! tmp)
7008 goto cont;
7009 delta = tmp;
7012 /* Only fold here if we can verify we do not overflow one
7013 dimension of a multi-dimensional array. */
7014 if (mdim)
7016 tree tmp;
7018 if (!TYPE_MIN_VALUE (domain)
7019 || !TYPE_MAX_VALUE (domain)
7020 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7021 goto cont;
7023 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7024 fold_convert_loc (loc, itype,
7025 TYPE_MIN_VALUE (domain)),
7026 fold_convert_loc (loc, itype, delta));
7027 if (TREE_CODE (tmp) != INTEGER_CST
7028 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7029 goto cont;
7032 /* We found a suitable component reference. */
7034 pref = TREE_OPERAND (addr, 0);
7035 ret = copy_node (pref);
7036 SET_EXPR_LOCATION (ret, loc);
7038 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7039 fold_build2_loc
7040 (loc, PLUS_EXPR, itype,
7041 fold_convert_loc (loc, itype,
7042 TYPE_MIN_VALUE
7043 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7044 fold_convert_loc (loc, itype, delta)),
7045 NULL_TREE, NULL_TREE);
7046 return build_fold_addr_expr_loc (loc, ret);
7049 cont:
7051 for (;; ref = TREE_OPERAND (ref, 0))
7053 if (TREE_CODE (ref) == ARRAY_REF)
7055 tree domain;
7057 /* Remember if this was a multi-dimensional array. */
7058 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7059 mdim = true;
7061 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7062 if (! domain)
7063 continue;
7064 itype = TREE_TYPE (domain);
7066 step = array_ref_element_size (ref);
7067 if (TREE_CODE (step) != INTEGER_CST)
7068 continue;
7070 if (s)
7072 if (! tree_int_cst_equal (step, s))
7073 continue;
7075 else
7077 /* See whether delta is a multiple of step. */
7078 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7079 if (! tmp)
7080 continue;
7081 delta = tmp;
7084 /* Only fold here if we can verify we do not overflow one
7085 dimension of a multi-dimensional array. */
7086 if (mdim)
7088 tree tmp;
7090 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7091 || !TYPE_MAX_VALUE (domain)
7092 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7093 continue;
7095 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7096 fold_convert_loc (loc, itype,
7097 TREE_OPERAND (ref, 1)),
7098 fold_convert_loc (loc, itype, delta));
7099 if (!tmp
7100 || TREE_CODE (tmp) != INTEGER_CST
7101 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7102 continue;
7105 break;
7107 else
7108 mdim = false;
7110 if (!handled_component_p (ref))
7111 return NULL_TREE;
7114 /* We found a suitable array reference. So copy everything up to it,
7115 and replace the index. */
7117 pref = TREE_OPERAND (addr, 0);
7118 ret = copy_node (pref);
7119 SET_EXPR_LOCATION (ret, loc);
7120 pos = ret;
7122 while (pref != ref)
7124 pref = TREE_OPERAND (pref, 0);
7125 TREE_OPERAND (pos, 0) = copy_node (pref);
7126 pos = TREE_OPERAND (pos, 0);
7129 TREE_OPERAND (pos, 1)
7130 = fold_build2_loc (loc, PLUS_EXPR, itype,
7131 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7132 fold_convert_loc (loc, itype, delta));
7133 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
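/* Editorial sketch, not part of fold-const.c: the address identity behind
   try_move_mult_to_index, checked with ordinary pointer arithmetic.  The
   helper name is hypothetical.  */

#include <assert.h>

static void
check_move_mult_to_index (void)
{
  int a[10];
  int i = 2, delta = 3;
  /* &a[i] p+ sizeof (int) * delta names the same element as
     &a[i + delta] when the step is the element size.  */
  assert ((int *) ((char *) &a[i] + sizeof (int) * delta) == &a[i + delta]);
}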
7137 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7138 means A >= Y && A != MAX, but in this case we know that
7139 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7141 static tree
7142 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7144 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7146 if (TREE_CODE (bound) == LT_EXPR)
7147 a = TREE_OPERAND (bound, 0);
7148 else if (TREE_CODE (bound) == GT_EXPR)
7149 a = TREE_OPERAND (bound, 1);
7150 else
7151 return NULL_TREE;
7153 typea = TREE_TYPE (a);
7154 if (!INTEGRAL_TYPE_P (typea)
7155 && !POINTER_TYPE_P (typea))
7156 return NULL_TREE;
7158 if (TREE_CODE (ineq) == LT_EXPR)
7160 a1 = TREE_OPERAND (ineq, 1);
7161 y = TREE_OPERAND (ineq, 0);
7163 else if (TREE_CODE (ineq) == GT_EXPR)
7165 a1 = TREE_OPERAND (ineq, 0);
7166 y = TREE_OPERAND (ineq, 1);
7168 else
7169 return NULL_TREE;
7171 if (TREE_TYPE (a1) != typea)
7172 return NULL_TREE;
7174 if (POINTER_TYPE_P (typea))
7176 /* Convert the pointer types into integer before taking the difference. */
7177 tree ta = fold_convert_loc (loc, ssizetype, a);
7178 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7179 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7181 else
7182 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7184 if (!diff || !integer_onep (diff))
7185 return NULL_TREE;
7187 return fold_build2_loc (loc, GE_EXPR, type, a, y);
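/* Editorial sketch, not part of fold-const.c: why the bound A < X makes
   the non-sharp rewrite above safe.  A < X rules out A == INT_MAX, so
   A + 1 cannot wrap, and over the integers A + 1 > Y is exactly A >= Y.
   The helper name is hypothetical.  */

#include <assert.h>

static void
check_nonsharp_ineq (int a, int x, int y)
{
  if (a < x)
    assert ((a + 1 > y) == (a >= y));
}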
7190 /* Fold a sum or difference of at least one multiplication.
7191 Returns the folded tree or NULL if no simplification could be made. */
7193 static tree
7194 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7195 tree arg0, tree arg1)
7197 tree arg00, arg01, arg10, arg11;
7198 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7200 /* (A * C) +- (B * C) -> (A+-B) * C.
7201 (A * C) +- A -> A * (C+-1).
7202 We are most concerned about the case where C is a constant,
7203 but other combinations show up during loop reduction. Since
7204 it is not difficult, try all four possibilities. */
7206 if (TREE_CODE (arg0) == MULT_EXPR)
7208 arg00 = TREE_OPERAND (arg0, 0);
7209 arg01 = TREE_OPERAND (arg0, 1);
7211 else if (TREE_CODE (arg0) == INTEGER_CST)
7213 arg00 = build_one_cst (type);
7214 arg01 = arg0;
7216 else
7218 /* We cannot generate constant 1 for fract. */
7219 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7220 return NULL_TREE;
7221 arg00 = arg0;
7222 arg01 = build_one_cst (type);
7224 if (TREE_CODE (arg1) == MULT_EXPR)
7226 arg10 = TREE_OPERAND (arg1, 0);
7227 arg11 = TREE_OPERAND (arg1, 1);
7229 else if (TREE_CODE (arg1) == INTEGER_CST)
7231 arg10 = build_one_cst (type);
7232 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7233 the purpose of this canonicalization.  */
7234 if (TREE_INT_CST_HIGH (arg1) == -1
7235 && negate_expr_p (arg1)
7236 && code == PLUS_EXPR)
7238 arg11 = negate_expr (arg1);
7239 code = MINUS_EXPR;
7241 else
7242 arg11 = arg1;
7244 else
7246 /* We cannot generate constant 1 for fract. */
7247 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7248 return NULL_TREE;
7249 arg10 = arg1;
7250 arg11 = build_one_cst (type);
7252 same = NULL_TREE;
7254 if (operand_equal_p (arg01, arg11, 0))
7255 same = arg01, alt0 = arg00, alt1 = arg10;
7256 else if (operand_equal_p (arg00, arg10, 0))
7257 same = arg00, alt0 = arg01, alt1 = arg11;
7258 else if (operand_equal_p (arg00, arg11, 0))
7259 same = arg00, alt0 = arg01, alt1 = arg10;
7260 else if (operand_equal_p (arg01, arg10, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg11;
7263 /* No identical multiplicands; see if we can find a common
7264 power-of-two factor in non-power-of-two multiplies. This
7265 can help in multi-dimensional array access. */
7266 else if (host_integerp (arg01, 0)
7267 && host_integerp (arg11, 0))
7269 HOST_WIDE_INT int01, int11, tmp;
7270 bool swap = false;
7271 tree maybe_same;
7272 int01 = TREE_INT_CST_LOW (arg01);
7273 int11 = TREE_INT_CST_LOW (arg11);
7275 /* Move min of absolute values to int11. */
7276 if (absu_hwi (int01) < absu_hwi (int11))
7278 tmp = int01, int01 = int11, int11 = tmp;
7279 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7280 maybe_same = arg01;
7281 swap = true;
7283 else
7284 maybe_same = arg11;
7286 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7287 /* The remainder should not be a constant, otherwise we
7288 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7289 increase the number of multiplications necessary.  */
7290 && TREE_CODE (arg10) != INTEGER_CST)
7292 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7293 build_int_cst (TREE_TYPE (arg00),
7294 int01 / int11));
7295 alt1 = arg10;
7296 same = maybe_same;
7297 if (swap)
7298 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
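/* Illustration of the power-of-two case (example values only): for
   i * 12 + j * 4 we get int01 == 12 and int11 == 4, so ALT0 becomes
   i * 3, ALT1 is j and SAME is 4, folding the sum to (i * 3 + j) * 4.  */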
7302 if (same)
7303 return fold_build2_loc (loc, MULT_EXPR, type,
7304 fold_build2_loc (loc, code, type,
7305 fold_convert_loc (loc, type, alt0),
7306 fold_convert_loc (loc, type, alt1)),
7307 fold_convert_loc (loc, type, same));
7309 return NULL_TREE;
7312 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7313 specified by EXPR into the buffer PTR of length LEN bytes.
7314 Return the number of bytes placed in the buffer, or zero
7315 upon failure. */
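/* Illustration (assuming a 4-byte integer mode and UNITS_PER_WORD of
   at least 4): encoding the INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one.  */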
7317 static int
7318 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7320 tree type = TREE_TYPE (expr);
7321 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7322 int byte, offset, word, words;
7323 unsigned char value;
7325 if (total_bytes > len)
7326 return 0;
7327 words = total_bytes / UNITS_PER_WORD;
7329 for (byte = 0; byte < total_bytes; byte++)
7331 int bitpos = byte * BITS_PER_UNIT;
7332 if (bitpos < HOST_BITS_PER_WIDE_INT)
7333 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7334 else
7335 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7336 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7338 if (total_bytes > UNITS_PER_WORD)
7340 word = byte / UNITS_PER_WORD;
7341 if (WORDS_BIG_ENDIAN)
7342 word = (words - 1) - word;
7343 offset = word * UNITS_PER_WORD;
7344 if (BYTES_BIG_ENDIAN)
7345 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7346 else
7347 offset += byte % UNITS_PER_WORD;
7349 else
7350 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7351 ptr[offset] = value;
7353 return total_bytes;
7357 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7362 static int
7363 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7365 tree type = TREE_TYPE (expr);
7366 enum machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 FIXED_VALUE_TYPE value;
7369 tree i_value, i_type;
7371 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7372 return 0;
7374 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7376 if (NULL_TREE == i_type
7377 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7378 return 0;
7380 value = TREE_FIXED_CST (expr);
7381 i_value = double_int_to_tree (i_type, value.data);
7383 return native_encode_int (i_value, ptr, len);
7387 /* Subroutine of native_encode_expr. Encode the REAL_CST
7388 specified by EXPR into the buffer PTR of length LEN bytes.
7389 Return the number of bytes placed in the buffer, or zero
7390 upon failure. */
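/* Illustration (assuming IEEE single precision): encoding a 32-bit
   REAL_CST of 1.0 produces the target image 0x3f800000, i.e. the
   bytes 00 00 80 3f on a little-endian target.  */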
7392 static int
7393 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7395 tree type = TREE_TYPE (expr);
7396 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7397 int byte, offset, word, words, bitpos;
7398 unsigned char value;
7400 /* There are always 32 bits in each long, no matter the size of
7401 the host's long.  We handle floating point representations with
7402 up to 192 bits.  */
7403 long tmp[6];
7405 if (total_bytes > len)
7406 return 0;
7407 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7409 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7411 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7412 bitpos += BITS_PER_UNIT)
7414 byte = (bitpos / BITS_PER_UNIT) & 3;
7415 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7417 if (UNITS_PER_WORD < 4)
7419 word = byte / UNITS_PER_WORD;
7420 if (WORDS_BIG_ENDIAN)
7421 word = (words - 1) - word;
7422 offset = word * UNITS_PER_WORD;
7423 if (BYTES_BIG_ENDIAN)
7424 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7425 else
7426 offset += byte % UNITS_PER_WORD;
7428 else
7429 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7430 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7432 return total_bytes;
7435 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7436 specified by EXPR into the buffer PTR of length LEN bytes.
7437 Return the number of bytes placed in the buffer, or zero
7438 upon failure. */
7440 static int
7441 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7443 int rsize, isize;
7444 tree part;
7446 part = TREE_REALPART (expr);
7447 rsize = native_encode_expr (part, ptr, len);
7448 if (rsize == 0)
7449 return 0;
7450 part = TREE_IMAGPART (expr);
7451 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7452 if (isize != rsize)
7453 return 0;
7454 return rsize + isize;
7458 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7459 specified by EXPR into the buffer PTR of length LEN bytes.
7460 Return the number of bytes placed in the buffer, or zero
7461 upon failure. */
7463 static int
7464 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7466 unsigned i, count;
7467 int size, offset;
7468 tree itype, elem;
7470 offset = 0;
7471 count = VECTOR_CST_NELTS (expr);
7472 itype = TREE_TYPE (TREE_TYPE (expr));
7473 size = GET_MODE_SIZE (TYPE_MODE (itype));
7474 for (i = 0; i < count; i++)
7476 elem = VECTOR_CST_ELT (expr, i);
7477 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7478 return 0;
7479 offset += size;
7481 return offset;
7485 /* Subroutine of native_encode_expr. Encode the STRING_CST
7486 specified by EXPR into the buffer PTR of length LEN bytes.
7487 Return the number of bytes placed in the buffer, or zero
7488 upon failure. */
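/* Illustration: for a STRING_CST of type char[8] holding "hi"
   (TREE_STRING_LENGTH of 3, counting the terminating NUL), the first
   three bytes are copied and the remaining five are zero-filled.  */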
7490 static int
7491 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7493 tree type = TREE_TYPE (expr);
7494 HOST_WIDE_INT total_bytes;
7496 if (TREE_CODE (type) != ARRAY_TYPE
7497 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7498 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7499 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7500 return 0;
7501 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7502 if (total_bytes > len)
7503 return 0;
7504 if (TREE_STRING_LENGTH (expr) < total_bytes)
7506 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7507 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7508 total_bytes - TREE_STRING_LENGTH (expr));
7510 else
7511 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7512 return total_bytes;
7516 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7517 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7518 specified by EXPR into the buffer PTR of length LEN bytes.
7519 Return the number of bytes placed in the buffer, or zero upon failure.  */
7521 int
7522 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7524 switch (TREE_CODE (expr))
7526 case INTEGER_CST:
7527 return native_encode_int (expr, ptr, len);
7529 case REAL_CST:
7530 return native_encode_real (expr, ptr, len);
7532 case FIXED_CST:
7533 return native_encode_fixed (expr, ptr, len);
7535 case COMPLEX_CST:
7536 return native_encode_complex (expr, ptr, len);
7538 case VECTOR_CST:
7539 return native_encode_vector (expr, ptr, len);
7541 case STRING_CST:
7542 return native_encode_string (expr, ptr, len);
7544 default:
7545 return 0;
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7554 static tree
7555 native_interpret_int (tree type, const unsigned char *ptr, int len)
7557 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7558 double_int result;
7560 if (total_bytes > len
7561 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7562 return NULL_TREE;
7564 result = double_int::from_buffer (ptr, total_bytes);
7566 return double_int_to_tree (type, result);
7570 /* Subroutine of native_interpret_expr. Interpret the contents of
7571 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7572 If the buffer cannot be interpreted, return NULL_TREE. */
7574 static tree
7575 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7577 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7578 double_int result;
7579 FIXED_VALUE_TYPE fixed_value;
7581 if (total_bytes > len
7582 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7583 return NULL_TREE;
7585 result = double_int::from_buffer (ptr, total_bytes);
7586 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7588 return build_fixed (type, fixed_value);
7592 /* Subroutine of native_interpret_expr. Interpret the contents of
7593 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7594 If the buffer cannot be interpreted, return NULL_TREE. */
7596 static tree
7597 native_interpret_real (tree type, const unsigned char *ptr, int len)
7599 enum machine_mode mode = TYPE_MODE (type);
7600 int total_bytes = GET_MODE_SIZE (mode);
7601 int byte, offset, word, words, bitpos;
7602 unsigned char value;
7603 /* There are always 32 bits in each long, no matter the size of
7604 the host's long.  We handle floating point representations with
7605 up to 192 bits.  */
7606 REAL_VALUE_TYPE r;
7607 long tmp[6];
7609 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7610 if (total_bytes > len || total_bytes > 24)
7611 return NULL_TREE;
7612 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7614 memset (tmp, 0, sizeof (tmp));
7615 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7616 bitpos += BITS_PER_UNIT)
7618 byte = (bitpos / BITS_PER_UNIT) & 3;
7619 if (UNITS_PER_WORD < 4)
7621 word = byte / UNITS_PER_WORD;
7622 if (WORDS_BIG_ENDIAN)
7623 word = (words - 1) - word;
7624 offset = word * UNITS_PER_WORD;
7625 if (BYTES_BIG_ENDIAN)
7626 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7627 else
7628 offset += byte % UNITS_PER_WORD;
7630 else
7631 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7632 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7634 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7637 real_from_target (&r, tmp, mode);
7638 return build_real (type, r);
7642 /* Subroutine of native_interpret_expr. Interpret the contents of
7643 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7644 If the buffer cannot be interpreted, return NULL_TREE. */
7646 static tree
7647 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7649 tree etype, rpart, ipart;
7650 int size;
7652 etype = TREE_TYPE (type);
7653 size = GET_MODE_SIZE (TYPE_MODE (etype));
7654 if (size * 2 > len)
7655 return NULL_TREE;
7656 rpart = native_interpret_expr (etype, ptr, size);
7657 if (!rpart)
7658 return NULL_TREE;
7659 ipart = native_interpret_expr (etype, ptr+size, size);
7660 if (!ipart)
7661 return NULL_TREE;
7662 return build_complex (type, rpart, ipart);
7666 /* Subroutine of native_interpret_expr. Interpret the contents of
7667 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7668 If the buffer cannot be interpreted, return NULL_TREE. */
7670 static tree
7671 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7673 tree etype, elem;
7674 int i, size, count;
7675 tree *elements;
7677 etype = TREE_TYPE (type);
7678 size = GET_MODE_SIZE (TYPE_MODE (etype));
7679 count = TYPE_VECTOR_SUBPARTS (type);
7680 if (size * count > len)
7681 return NULL_TREE;
7683 elements = XALLOCAVEC (tree, count);
7684 for (i = count - 1; i >= 0; i--)
7686 elem = native_interpret_expr (etype, ptr+(i*size), size);
7687 if (!elem)
7688 return NULL_TREE;
7689 elements[i] = elem;
7691 return build_vector (type, elements);
7695 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7696 the buffer PTR of length LEN as a constant of type TYPE. For
7697 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7698 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7699 return NULL_TREE. */
7701 tree
7702 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7704 switch (TREE_CODE (type))
7706 case INTEGER_TYPE:
7707 case ENUMERAL_TYPE:
7708 case BOOLEAN_TYPE:
7709 case POINTER_TYPE:
7710 case REFERENCE_TYPE:
7711 return native_interpret_int (type, ptr, len);
7713 case REAL_TYPE:
7714 return native_interpret_real (type, ptr, len);
7716 case FIXED_POINT_TYPE:
7717 return native_interpret_fixed (type, ptr, len);
7719 case COMPLEX_TYPE:
7720 return native_interpret_complex (type, ptr, len);
7722 case VECTOR_TYPE:
7723 return native_interpret_vector (type, ptr, len);
7725 default:
7726 return NULL_TREE;
7730 /* Returns true if we can interpret the contents of a native encoding
7731 as TYPE. */
7733 static bool
7734 can_native_interpret_type_p (tree type)
7736 switch (TREE_CODE (type))
7738 case INTEGER_TYPE:
7739 case ENUMERAL_TYPE:
7740 case BOOLEAN_TYPE:
7741 case POINTER_TYPE:
7742 case REFERENCE_TYPE:
7743 case FIXED_POINT_TYPE:
7744 case REAL_TYPE:
7745 case COMPLEX_TYPE:
7746 case VECTOR_TYPE:
7747 return true;
7748 default:
7749 return false;
7753 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7754 TYPE at compile-time. If we're unable to perform the conversion
7755 return NULL_TREE. */
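/* Illustration (assuming 32-bit int and IEEE single precision):
   folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the float into its
   four target bytes and reinterprets them, yielding the INTEGER_CST
   0x3f800000.  */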
7757 static tree
7758 fold_view_convert_expr (tree type, tree expr)
7760 /* We support up to 512-bit values (for V8DFmode). */
7761 unsigned char buffer[64];
7762 int len;
7764 /* Check that the host and target are sane. */
7765 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7766 return NULL_TREE;
7768 len = native_encode_expr (expr, buffer, sizeof (buffer));
7769 if (len == 0)
7770 return NULL_TREE;
7772 return native_interpret_expr (type, buffer, len);
7775 /* Build an expression for the address of T. Folds away INDIRECT_REF
7776 to avoid confusing the gimplify process. */
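/* Illustrations: building the address of *p yields p itself, and for
   a MEM_REF with a constant base, e.g. MEM[(char *)0x1000, 8], the
   result folds to the address constant 0x1000 p+ 8 (values here are
   for exposition only).  */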
7778 tree
7779 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7781 /* The size of the object is not relevant when talking about its address. */
7782 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7783 t = TREE_OPERAND (t, 0);
7785 if (TREE_CODE (t) == INDIRECT_REF)
7787 t = TREE_OPERAND (t, 0);
7789 if (TREE_TYPE (t) != ptrtype)
7790 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7792 else if (TREE_CODE (t) == MEM_REF
7793 && integer_zerop (TREE_OPERAND (t, 1)))
7794 return TREE_OPERAND (t, 0);
7795 else if (TREE_CODE (t) == MEM_REF
7796 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7797 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7798 TREE_OPERAND (t, 0),
7799 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7800 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7802 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7804 if (TREE_TYPE (t) != ptrtype)
7805 t = fold_convert_loc (loc, ptrtype, t);
7807 else
7808 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7810 return t;
7813 /* Build an expression for the address of T. */
7815 tree
7816 build_fold_addr_expr_loc (location_t loc, tree t)
7818 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7820 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7823 static bool vec_cst_ctor_to_array (tree, tree *);
7825 /* Fold a unary expression of code CODE and type TYPE with operand
7826 OP0. Return the folded expression if folding is successful.
7827 Otherwise, return NULL_TREE. */
7829 tree
7830 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7832 tree tem;
7833 tree arg0;
7834 enum tree_code_class kind = TREE_CODE_CLASS (code);
7836 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7837 && TREE_CODE_LENGTH (code) == 1);
7839 arg0 = op0;
7840 if (arg0)
7842 if (CONVERT_EXPR_CODE_P (code)
7843 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7845 /* Don't use STRIP_NOPS, because signedness of argument type
7846 matters. */
7847 STRIP_SIGN_NOPS (arg0);
7849 else
7851 /* Strip any conversions that don't change the mode. This
7852 is safe for every expression, except for a comparison
7853 expression because its signedness is derived from its
7854 operands.
7856 Note that this is done as an internal manipulation within
7857 the constant folder, in order to find the simplest
7858 representation of the arguments so that their form can be
7859 studied.  In any case, the appropriate type conversions
7860 should be put back in the tree that will get out of the
7861 constant folder. */
7862 STRIP_NOPS (arg0);
7866 if (TREE_CODE_CLASS (code) == tcc_unary)
7868 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7869 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7870 fold_build1_loc (loc, code, type,
7871 fold_convert_loc (loc, TREE_TYPE (op0),
7872 TREE_OPERAND (arg0, 1))));
7873 else if (TREE_CODE (arg0) == COND_EXPR)
7875 tree arg01 = TREE_OPERAND (arg0, 1);
7876 tree arg02 = TREE_OPERAND (arg0, 2);
7877 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7878 arg01 = fold_build1_loc (loc, code, type,
7879 fold_convert_loc (loc,
7880 TREE_TYPE (op0), arg01));
7881 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7882 arg02 = fold_build1_loc (loc, code, type,
7883 fold_convert_loc (loc,
7884 TREE_TYPE (op0), arg02));
7885 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7886 arg01, arg02);
7888 /* If this was a conversion, and all we did was to move it
7889 inside the COND_EXPR, bring it back out.  But leave it if
7890 it is a conversion from integer to integer and the
7891 result precision is no wider than a word since such a
7892 conversion is cheap and may be optimized away by combine,
7893 while it couldn't if it were outside the COND_EXPR. Then return
7894 so we don't get into an infinite recursion loop taking the
7895 conversion out and then back in. */
7897 if ((CONVERT_EXPR_CODE_P (code)
7898 || code == NON_LVALUE_EXPR)
7899 && TREE_CODE (tem) == COND_EXPR
7900 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7901 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7902 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7903 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7904 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7905 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7906 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7907 && (INTEGRAL_TYPE_P
7908 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7909 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7910 || flag_syntax_only))
7911 tem = build1_loc (loc, code, type,
7912 build3 (COND_EXPR,
7913 TREE_TYPE (TREE_OPERAND
7914 (TREE_OPERAND (tem, 1), 0)),
7915 TREE_OPERAND (tem, 0),
7916 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7917 TREE_OPERAND (TREE_OPERAND (tem, 2),
7918 0)));
7919 return tem;
7923 switch (code)
7925 case PAREN_EXPR:
7926 /* Re-association barriers around constants and other re-association
7927 barriers can be removed. */
7928 if (CONSTANT_CLASS_P (op0)
7929 || TREE_CODE (op0) == PAREN_EXPR)
7930 return fold_convert_loc (loc, type, op0);
7931 return NULL_TREE;
7933 CASE_CONVERT:
7934 case FLOAT_EXPR:
7935 case FIX_TRUNC_EXPR:
7936 if (TREE_TYPE (op0) == type)
7937 return op0;
7939 if (COMPARISON_CLASS_P (op0))
7941 /* If we have (type) (a CMP b) and type is an integral type, return
7942 a new expression involving the new type.  Canonicalize
7943 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7944 non-integral type.
7945 Do not fold the result, as that would not simplify further;
7946 folding again would only result in recursion.  */
7947 if (TREE_CODE (type) == BOOLEAN_TYPE)
7948 return build2_loc (loc, TREE_CODE (op0), type,
7949 TREE_OPERAND (op0, 0),
7950 TREE_OPERAND (op0, 1));
7951 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7952 && TREE_CODE (type) != VECTOR_TYPE)
7953 return build3_loc (loc, COND_EXPR, type, op0,
7954 constant_boolean_node (true, type),
7955 constant_boolean_node (false, type));
7958 /* Handle cases of two conversions in a row. */
7959 if (CONVERT_EXPR_P (op0))
7961 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7962 tree inter_type = TREE_TYPE (op0);
7963 int inside_int = INTEGRAL_TYPE_P (inside_type);
7964 int inside_ptr = POINTER_TYPE_P (inside_type);
7965 int inside_float = FLOAT_TYPE_P (inside_type);
7966 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7967 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7968 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7969 int inter_int = INTEGRAL_TYPE_P (inter_type);
7970 int inter_ptr = POINTER_TYPE_P (inter_type);
7971 int inter_float = FLOAT_TYPE_P (inter_type);
7972 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7973 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7974 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7975 int final_int = INTEGRAL_TYPE_P (type);
7976 int final_ptr = POINTER_TYPE_P (type);
7977 int final_float = FLOAT_TYPE_P (type);
7978 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7979 unsigned int final_prec = TYPE_PRECISION (type);
7980 int final_unsignedp = TYPE_UNSIGNED (type);
7982 /* In addition to the cases of two conversions in a row
7983 handled below, if we are converting something to its own
7984 type via an object of identical or wider precision, neither
7985 conversion is needed. */
7986 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7987 && (((inter_int || inter_ptr) && final_int)
7988 || (inter_float && final_float))
7989 && inter_prec >= final_prec)
7990 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7992 /* Likewise, if the intermediate and initial types are either both
7993 float or both integer, we don't need the middle conversion if the
7994 former is wider than the latter and doesn't change the signedness
7995 (for integers). Avoid this if the final type is a pointer since
7996 then we sometimes need the middle conversion. Likewise if the
7997 final type has a precision not equal to the size of its mode. */
7998 if (((inter_int && inside_int)
7999 || (inter_float && inside_float)
8000 || (inter_vec && inside_vec))
8001 && inter_prec >= inside_prec
8002 && (inter_float || inter_vec
8003 || inter_unsignedp == inside_unsignedp)
8004 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8005 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8006 && ! final_ptr
8007 && (! final_vec || inter_prec == inside_prec))
8008 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8010 /* If we have a sign-extension of a zero-extended value, we can
8011 replace that by a single zero-extension. Likewise if the
8012 final conversion does not change precision we can drop the
8013 intermediate conversion. */
8014 if (inside_int && inter_int && final_int
8015 && ((inside_prec < inter_prec && inter_prec < final_prec
8016 && inside_unsignedp && !inter_unsignedp)
8017 || final_prec == inter_prec))
8018 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
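/* Illustration of the zero-extension case: for unsigned char c,
   (int)(short)c zero-extends c to 16 bits and then sign-extends to
   32; the sign bit of the intermediate is always clear, so this is
   the same as the single zero-extension (int)c.  */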
8020 /* Two conversions in a row are not needed unless:
8021 - some conversion is floating-point (overstrict for now), or
8022 - some conversion is a vector (overstrict for now), or
8023 - the intermediate type is narrower than both initial and
8024 final, or
8025 - the intermediate type and innermost type differ in signedness,
8026 and the outermost type is wider than the intermediate, or
8027 - the initial type is a pointer type and the precisions of the
8028 intermediate and final types differ, or
8029 - the final type is a pointer type and the precisions of the
8030 initial and intermediate types differ. */
8031 if (! inside_float && ! inter_float && ! final_float
8032 && ! inside_vec && ! inter_vec && ! final_vec
8033 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8034 && ! (inside_int && inter_int
8035 && inter_unsignedp != inside_unsignedp
8036 && inter_prec < final_prec)
8037 && ((inter_unsignedp && inter_prec > inside_prec)
8038 == (final_unsignedp && final_prec > inter_prec))
8039 && ! (inside_ptr && inter_prec != final_prec)
8040 && ! (final_ptr && inside_prec != inter_prec)
8041 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8042 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8043 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
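/* Illustration: with 32-bit int x, (char)(short)x meets the
   conditions above (no floats, vectors or pointers involved, and the
   intermediate type is not narrower than both initial and final), so
   the intermediate cast is dropped, leaving (char)x.  */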
8046 /* Handle (T *)&A.B.C for A being of type T and B and C
8047 living at offset zero. This occurs frequently in
8048 C++ upcasting and then accessing the base. */
8049 if (TREE_CODE (op0) == ADDR_EXPR
8050 && POINTER_TYPE_P (type)
8051 && handled_component_p (TREE_OPERAND (op0, 0)))
8053 HOST_WIDE_INT bitsize, bitpos;
8054 tree offset;
8055 enum machine_mode mode;
8056 int unsignedp, volatilep;
8057 tree base = TREE_OPERAND (op0, 0);
8058 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8059 &mode, &unsignedp, &volatilep, false);
8060 /* If the reference was to a (constant) zero offset, we can use
8061 the address of the base if it has the same base type
8062 as the result type and the pointer type is unqualified. */
8063 if (! offset && bitpos == 0
8064 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8065 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8066 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8067 return fold_convert_loc (loc, type,
8068 build_fold_addr_expr_loc (loc, base));
8071 if (TREE_CODE (op0) == MODIFY_EXPR
8072 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8073 /* Detect assigning a bitfield. */
8074 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8075 && DECL_BIT_FIELD
8076 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8078 /* Don't leave an assignment inside a conversion
8079 unless assigning a bitfield. */
8080 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8081 /* First do the assignment, then return converted constant. */
8082 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8083 TREE_NO_WARNING (tem) = 1;
8084 TREE_USED (tem) = 1;
8085 return tem;
8088 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8089 constant (if x has signed type, the sign bit cannot be set
8090 in c). This folds extension into the BIT_AND_EXPR.
8091 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8092 very likely don't have maximal range for their precision and this
8093 transformation effectively doesn't preserve non-maximal ranges. */
8094 if (TREE_CODE (type) == INTEGER_TYPE
8095 && TREE_CODE (op0) == BIT_AND_EXPR
8096 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8098 tree and_expr = op0;
8099 tree and0 = TREE_OPERAND (and_expr, 0);
8100 tree and1 = TREE_OPERAND (and_expr, 1);
8101 int change = 0;
8103 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8104 || (TYPE_PRECISION (type)
8105 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8106 change = 1;
8107 else if (TYPE_PRECISION (TREE_TYPE (and1))
8108 <= HOST_BITS_PER_WIDE_INT
8109 && host_integerp (and1, 1))
8111 unsigned HOST_WIDE_INT cst;
8113 cst = tree_low_cst (and1, 1);
8114 cst &= HOST_WIDE_INT_M1U
8115 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8116 change = (cst == 0);
8117 #ifdef LOAD_EXTEND_OP
8118 if (change
8119 && !flag_syntax_only
8120 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8121 == ZERO_EXTEND))
8123 tree uns = unsigned_type_for (TREE_TYPE (and0));
8124 and0 = fold_convert_loc (loc, uns, and0);
8125 and1 = fold_convert_loc (loc, uns, and1);
8127 #endif
8129 if (change)
8131 tem = force_fit_type_double (type, tree_to_double_int (and1),
8132 0, TREE_OVERFLOW (and1));
8133 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8134 fold_convert_loc (loc, type, and0), tem);
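/* Illustration of the BIT_AND_EXPR case above (assuming 32-bit int
   and 64-bit long): (long)(x & 0xff) becomes (long)x & 0xff, since
   the mask clears the sign bit of x and extending before or after
   the masking gives the same value.  */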
8138 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8139 when one of the new casts will fold away. Conservatively we assume
8140 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8141 if (POINTER_TYPE_P (type)
8142 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8143 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8144 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8145 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8146 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8148 tree arg00 = TREE_OPERAND (arg0, 0);
8149 tree arg01 = TREE_OPERAND (arg0, 1);
8151 return fold_build_pointer_plus_loc
8152 (loc, fold_convert_loc (loc, type, arg00), arg01);
8155 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8156 of the same precision, and X has an integral type not narrower than
8157 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8158 if (INTEGRAL_TYPE_P (type)
8159 && TREE_CODE (op0) == BIT_NOT_EXPR
8160 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8161 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8162 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8164 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8165 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8166 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8167 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8168 fold_convert_loc (loc, type, tem));
8171 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8172 type of X and Y (integer types only). */
8173 if (INTEGRAL_TYPE_P (type)
8174 && TREE_CODE (op0) == MULT_EXPR
8175 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8176 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8178 /* Be careful not to introduce new overflows. */
8179 tree mult_type;
8180 if (TYPE_OVERFLOW_WRAPS (type))
8181 mult_type = type;
8182 else
8183 mult_type = unsigned_type_for (type);
8185 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8187 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8188 fold_convert_loc (loc, mult_type,
8189 TREE_OPERAND (op0, 0)),
8190 fold_convert_loc (loc, mult_type,
8191 TREE_OPERAND (op0, 1)));
8192 return fold_convert_loc (loc, type, tem);
8196 tem = fold_convert_const (code, type, op0);
8197 return tem ? tem : NULL_TREE;
8199 case ADDR_SPACE_CONVERT_EXPR:
8200 if (integer_zerop (arg0))
8201 return fold_convert_const (code, type, arg0);
8202 return NULL_TREE;
8204 case FIXED_CONVERT_EXPR:
8205 tem = fold_convert_const (code, type, arg0);
8206 return tem ? tem : NULL_TREE;
8208 case VIEW_CONVERT_EXPR:
8209 if (TREE_TYPE (op0) == type)
8210 return op0;
8211 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8212 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8213 type, TREE_OPERAND (op0, 0));
8214 if (TREE_CODE (op0) == MEM_REF)
8215 return fold_build2_loc (loc, MEM_REF, type,
8216 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8218 /* For integral conversions with the same precision or pointer
8219 conversions use a NOP_EXPR instead. */
8220 if ((INTEGRAL_TYPE_P (type)
8221 || POINTER_TYPE_P (type))
8222 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8223 || POINTER_TYPE_P (TREE_TYPE (op0)))
8224 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8225 return fold_convert_loc (loc, type, op0);
8227 /* Strip inner integral conversions that do not change the precision. */
8228 if (CONVERT_EXPR_P (op0)
8229 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8230 || POINTER_TYPE_P (TREE_TYPE (op0)))
8231 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8232 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8233 && (TYPE_PRECISION (TREE_TYPE (op0))
8234 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8235 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8236 type, TREE_OPERAND (op0, 0));
8238 return fold_view_convert_expr (type, op0);
8240 case NEGATE_EXPR:
8241 tem = fold_negate_expr (loc, arg0);
8242 if (tem)
8243 return fold_convert_loc (loc, type, tem);
8244 return NULL_TREE;
8246 case ABS_EXPR:
8247 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8248 return fold_abs_const (arg0, type);
8249 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8250 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8251 /* Convert fabs((double)float) into (double)fabsf(float). */
8252 else if (TREE_CODE (arg0) == NOP_EXPR
8253 && TREE_CODE (type) == REAL_TYPE)
8255 tree targ0 = strip_float_extensions (arg0);
8256 if (targ0 != arg0)
8257 return fold_convert_loc (loc, type,
8258 fold_build1_loc (loc, ABS_EXPR,
8259 TREE_TYPE (targ0),
8260 targ0));
8262 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8263 else if (TREE_CODE (arg0) == ABS_EXPR)
8264 return arg0;
8265 else if (tree_expr_nonnegative_p (arg0))
8266 return arg0;
8268 /* Strip sign ops from argument. */
8269 if (TREE_CODE (type) == REAL_TYPE)
8271 tem = fold_strip_sign_ops (arg0);
8272 if (tem)
8273 return fold_build1_loc (loc, ABS_EXPR, type,
8274 fold_convert_loc (loc, type, tem));
8276 return NULL_TREE;
8278 case CONJ_EXPR:
8279 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8280 return fold_convert_loc (loc, type, arg0);
8281 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8283 tree itype = TREE_TYPE (type);
8284 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8285 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8286 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8287 negate_expr (ipart));
8289 if (TREE_CODE (arg0) == COMPLEX_CST)
8291 tree itype = TREE_TYPE (type);
8292 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8293 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8294 return build_complex (type, rpart, negate_expr (ipart));
8296 if (TREE_CODE (arg0) == CONJ_EXPR)
8297 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8298 return NULL_TREE;
8300 case BIT_NOT_EXPR:
8301 if (TREE_CODE (arg0) == INTEGER_CST)
8302 return fold_not_const (arg0, type);
8303 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8304 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8305 /* Convert ~ (-A) to A - 1. */
8306 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8307 return fold_build2_loc (loc, MINUS_EXPR, type,
8308 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8309 build_int_cst (type, 1));
8310 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8311 else if (INTEGRAL_TYPE_P (type)
8312 && ((TREE_CODE (arg0) == MINUS_EXPR
8313 && integer_onep (TREE_OPERAND (arg0, 1)))
8314 || (TREE_CODE (arg0) == PLUS_EXPR
8315 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8316 return fold_build1_loc (loc, NEGATE_EXPR, type,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 0)));
8319 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8320 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8321 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8322 fold_convert_loc (loc, type,
8323 TREE_OPERAND (arg0, 0)))))
8324 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8325 fold_convert_loc (loc, type,
8326 TREE_OPERAND (arg0, 1)));
8327 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8328 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8329 fold_convert_loc (loc, type,
8330 TREE_OPERAND (arg0, 1)))))
8331 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8332 fold_convert_loc (loc, type,
8333 TREE_OPERAND (arg0, 0)), tem);
8334 /* Perform BIT_NOT_EXPR on each element individually. */
8335 else if (TREE_CODE (arg0) == VECTOR_CST)
8337 tree *elements;
8338 tree elem;
8339 unsigned count = VECTOR_CST_NELTS (arg0), i;
8341 elements = XALLOCAVEC (tree, count);
8342 for (i = 0; i < count; i++)
8344 elem = VECTOR_CST_ELT (arg0, i);
8345 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8346 if (elem == NULL_TREE)
8347 break;
8348 elements[i] = elem;
8350 if (i == count)
8351 return build_vector (type, elements);
8353 else if (COMPARISON_CLASS_P (arg0)
8354 && (VECTOR_TYPE_P (type)
8355 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8357 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8358 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8359 HONOR_NANS (TYPE_MODE (op_type)));
8360 if (subcode != ERROR_MARK)
8361 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8362 TREE_OPERAND (arg0, 1));
8366 return NULL_TREE;
8368 case TRUTH_NOT_EXPR:
8369 /* Note that the operand of this must be an int
8370 and its values must be 0 or 1.
8371 ("true" is a fixed value perhaps depending on the language,
8372 but we don't handle values other than 1 correctly yet.) */
8373 tem = fold_truth_not_expr (loc, arg0);
8374 if (!tem)
8375 return NULL_TREE;
8376 return fold_convert_loc (loc, type, tem);
8378 case REALPART_EXPR:
8379 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8380 return fold_convert_loc (loc, type, arg0);
8381 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8382 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8383 TREE_OPERAND (arg0, 1));
8384 if (TREE_CODE (arg0) == COMPLEX_CST)
8385 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8386 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8388 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8389 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8390 fold_build1_loc (loc, REALPART_EXPR, itype,
8391 TREE_OPERAND (arg0, 0)),
8392 fold_build1_loc (loc, REALPART_EXPR, itype,
8393 TREE_OPERAND (arg0, 1)));
8394 return fold_convert_loc (loc, type, tem);
8396 if (TREE_CODE (arg0) == CONJ_EXPR)
8398 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8399 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8400 TREE_OPERAND (arg0, 0));
8401 return fold_convert_loc (loc, type, tem);
8403 if (TREE_CODE (arg0) == CALL_EXPR)
8405 tree fn = get_callee_fndecl (arg0);
8406 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8407 switch (DECL_FUNCTION_CODE (fn))
8409 CASE_FLT_FN (BUILT_IN_CEXPI):
8410 fn = mathfn_built_in (type, BUILT_IN_COS);
8411 if (fn)
8412 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8413 break;
8415 default:
8416 break;
8419 return NULL_TREE;
8421 case IMAGPART_EXPR:
8422 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8423 return build_zero_cst (type);
8424 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8425 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8426 TREE_OPERAND (arg0, 0));
8427 if (TREE_CODE (arg0) == COMPLEX_CST)
8428 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8429 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8431 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8432 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8433 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8434 TREE_OPERAND (arg0, 0)),
8435 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8436 TREE_OPERAND (arg0, 1)));
8437 return fold_convert_loc (loc, type, tem);
8439 if (TREE_CODE (arg0) == CONJ_EXPR)
8441 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8442 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8443 return fold_convert_loc (loc, type, negate_expr (tem));
8445 if (TREE_CODE (arg0) == CALL_EXPR)
8447 tree fn = get_callee_fndecl (arg0);
8448 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8449 switch (DECL_FUNCTION_CODE (fn))
8451 CASE_FLT_FN (BUILT_IN_CEXPI):
8452 fn = mathfn_built_in (type, BUILT_IN_SIN);
8453 if (fn)
8454 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8455 break;
8457 default:
8458 break;
8461 return NULL_TREE;
8463 case INDIRECT_REF:
8464 /* Fold *&X to X if X is an lvalue. */
8465 if (TREE_CODE (op0) == ADDR_EXPR)
8467 tree op00 = TREE_OPERAND (op0, 0);
8468 if ((TREE_CODE (op00) == VAR_DECL
8469 || TREE_CODE (op00) == PARM_DECL
8470 || TREE_CODE (op00) == RESULT_DECL)
8471 && !TREE_READONLY (op00))
8472 return op00;
8474 return NULL_TREE;
8476 case VEC_UNPACK_LO_EXPR:
8477 case VEC_UNPACK_HI_EXPR:
8478 case VEC_UNPACK_FLOAT_LO_EXPR:
8479 case VEC_UNPACK_FLOAT_HI_EXPR:
8481 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8482 tree *elts;
8483 enum tree_code subcode;
8485 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8486 if (TREE_CODE (arg0) != VECTOR_CST)
8487 return NULL_TREE;
8489 elts = XALLOCAVEC (tree, nelts * 2);
8490 if (!vec_cst_ctor_to_array (arg0, elts))
8491 return NULL_TREE;
8493 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8494 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8495 elts += nelts;
8497 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8498 subcode = NOP_EXPR;
8499 else
8500 subcode = FLOAT_EXPR;
8502 for (i = 0; i < nelts; i++)
8504 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8505 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8506 return NULL_TREE;
8509 return build_vector (type, elts);
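/* Illustration: on a little-endian target, VEC_UNPACK_LO_EXPR applied
   to the V4HI constant { 1, 2, 3, 4 } with a V2SI result type widens
   the low half, yielding { 1, 2 }.  */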
8512 case REDUC_MIN_EXPR:
8513 case REDUC_MAX_EXPR:
8514 case REDUC_PLUS_EXPR:
8516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8517 tree *elts;
8518 enum tree_code subcode;
8520 if (TREE_CODE (op0) != VECTOR_CST)
8521 return NULL_TREE;
8523 elts = XALLOCAVEC (tree, nelts);
8524 if (!vec_cst_ctor_to_array (op0, elts))
8525 return NULL_TREE;
8527 switch (code)
8529 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8530 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8531 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8532 default: gcc_unreachable ();
8535 for (i = 1; i < nelts; i++)
8537 elts[0] = const_binop (subcode, elts[0], elts[i]);
8538 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8539 return NULL_TREE;
8540 elts[i] = build_zero_cst (TREE_TYPE (type));
8543 return build_vector (type, elts);
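/* Illustration: REDUC_PLUS_EXPR on the constant vector { 1, 2, 3, 4 }
   accumulates the sum into element 0 and zeroes the remaining lanes,
   producing { 10, 0, 0, 0 }.  */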
8546 default:
8547 return NULL_TREE;
8548 } /* switch (code) */
8552 /* If the operation was a conversion, do _not_ mark a resulting constant
8553 with TREE_OVERFLOW if the original constant was not.  These conversions
8554 have implementation-defined behavior and retaining the TREE_OVERFLOW
8555 flag here would confuse later passes such as VRP. */
8556 tree
8557 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8558 tree type, tree op0)
8560 tree res = fold_unary_loc (loc, code, type, op0);
8561 if (res
8562 && TREE_CODE (res) == INTEGER_CST
8563 && TREE_CODE (op0) == INTEGER_CST
8564 && CONVERT_EXPR_CODE_P (code))
8565 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8567 return res;
8570 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8571 operands OP0 and OP1. LOC is the location of the resulting expression.
8572 ARG0 and ARG1 are OP0 and OP1 with any NOPs stripped (as by STRIP_NOPS).
8573 Return the folded expression if folding is successful. Otherwise,
8574 return NULL_TREE. */
8575 static tree
8576 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8577 tree arg0, tree arg1, tree op0, tree op1)
8579 tree tem;
8581 /* We only do these simplifications if we are optimizing. */
8582 if (!optimize)
8583 return NULL_TREE;
8585 /* Check for things like (A || B) && (A || C). We can convert this
8586 to A || (B && C). Note that either operator can be any of the four
8587 truth and/or operations and the transformation will still be
8588 valid. Also note that we only care about order for the
8589 ANDIF and ORIF operators. If B contains side effects, this
8590 might change the truth-value of A. */
8591 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8592 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8593 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8594 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8595 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8596 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8598 tree a00 = TREE_OPERAND (arg0, 0);
8599 tree a01 = TREE_OPERAND (arg0, 1);
8600 tree a10 = TREE_OPERAND (arg1, 0);
8601 tree a11 = TREE_OPERAND (arg1, 1);
8602 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8603 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8604 && (code == TRUTH_AND_EXPR
8605 || code == TRUTH_OR_EXPR));
8607 if (operand_equal_p (a00, a10, 0))
8608 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8609 fold_build2_loc (loc, code, type, a01, a11));
8610 else if (commutative && operand_equal_p (a00, a11, 0))
8611 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8612 fold_build2_loc (loc, code, type, a01, a10));
8613 else if (commutative && operand_equal_p (a01, a10, 0))
8614 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8615 fold_build2_loc (loc, code, type, a00, a11));
8617 /* This case is tricky because we must either have commutative
8618 operators or else A10 must not have side-effects.  */
8620 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8621 && operand_equal_p (a01, a11, 0))
8622 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8623 fold_build2_loc (loc, code, type, a00, a10),
8624 a01);
8627 /* See if we can build a range comparison. */
8628 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8629 return tem;
8631 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8632 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8634 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8635 if (tem)
8636 return fold_build2_loc (loc, code, type, tem, arg1);
8639 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8640 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8642 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8643 if (tem)
8644 return fold_build2_loc (loc, code, type, arg0, tem);
8647 /* Check for the possibility of merging component references. If our
8648 lhs is another similar operation, try to merge its rhs with our
8649 rhs. Then try to merge our lhs and rhs. */
8650 if (TREE_CODE (arg0) == code
8651 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8652 TREE_OPERAND (arg0, 1), arg1)))
8653 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8655 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8656 return tem;
8658 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8659 && (code == TRUTH_AND_EXPR
8660 || code == TRUTH_ANDIF_EXPR
8661 || code == TRUTH_OR_EXPR
8662 || code == TRUTH_ORIF_EXPR))
8664 enum tree_code ncode, icode;
8666 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8667 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8668 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8670 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8671 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8672 We don't want to pack more than two leaves into a non-IF AND/OR
8673 expression.
8674 If the tree code of the left-hand operand isn't an AND/OR-IF code
8675 and isn't equal to IF-CODE, then we don't want to add the
8676 right-hand operand.  If the inner right-hand side of the left-hand
8677 operand has side effects, or isn't simple, then we can't add to it,
8678 as otherwise we might destroy the if-sequence.  */
8679 if (TREE_CODE (arg0) == icode
8680 && simple_operand_p_2 (arg1)
8681 /* Needed for sequence points to handle trapping and
8682 side effects.  */
8683 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8685 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8686 arg1);
8687 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8688 tem);
8690 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8691 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8692 else if (TREE_CODE (arg1) == icode
8693 && simple_operand_p_2 (arg0)
8694 /* Needed for sequence points to handle trapping and
8695 side effects.  */
8696 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8698 tem = fold_build2_loc (loc, ncode, type,
8699 arg0, TREE_OPERAND (arg1, 0));
8700 return fold_build2_loc (loc, icode, type, tem,
8701 TREE_OPERAND (arg1, 1));
8703 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8704 into (A OR B).
8705 For sequence point consistency, we need to check for trapping
8706 and side effects.  */
8707 else if (code == icode && simple_operand_p_2 (arg0)
8708 && simple_operand_p_2 (arg1))
8709 return fold_build2_loc (loc, ncode, type, arg0, arg1);
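/* Illustration: on targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds,
   and with simple, trap-free operands, a > 0 && b > 0 becomes the
   non-short-circuit TRUTH_AND_EXPR (a > 0) AND (b > 0), which may
   evaluate both comparisons unconditionally.  */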
8712 return NULL_TREE;
8715 /* Fold a binary expression of code CODE and type TYPE with operands
8716 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8717 Return the folded expression if folding is successful. Otherwise,
8718 return NULL_TREE. */
8720 static tree
8721 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8723 enum tree_code compl_code;
8725 if (code == MIN_EXPR)
8726 compl_code = MAX_EXPR;
8727 else if (code == MAX_EXPR)
8728 compl_code = MIN_EXPR;
8729 else
8730 gcc_unreachable ();
8732 /* MIN (MAX (a, b), b) == b. */
8733 if (TREE_CODE (op0) == compl_code
8734 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8735 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8737 /* MIN (MAX (b, a), b) == b. */
8738 if (TREE_CODE (op0) == compl_code
8739 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8740 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8741 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8743 /* MIN (a, MAX (a, b)) == a. */
8744 if (TREE_CODE (op1) == compl_code
8745 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8746 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8747 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8749 /* MIN (a, MAX (b, a)) == a. */
8750 if (TREE_CODE (op1) == compl_code
8751 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8752 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8753 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8755 return NULL_TREE;
8758 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8759 by changing CODE to reduce the magnitude of constants involved in
8760 ARG0 of the comparison.
8761 Returns a canonicalized comparison tree if a simplification was
8762 possible, otherwise returns NULL_TREE.
8763 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8764 valid if signed overflow is undefined. */
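/* Illustrations (signed operands, overflow undefined; values chosen
   for exposition): x + 2 <= y is canonicalized to x + 1 < y, and the
   constant form 5 <= y is reduced to 4 < y and then swapped to
   y > 4.  */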
8766 static tree
8767 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8768 tree arg0, tree arg1,
8769 bool *strict_overflow_p)
8771 enum tree_code code0 = TREE_CODE (arg0);
8772 tree t, cst0 = NULL_TREE;
8773 int sgn0;
8774 bool swap = false;
8776 /* Match A +- CST code arg1 and CST code arg1. We can change the
8777 first form only if overflow is undefined. */
8778 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8779 /* In principle pointers also have undefined overflow behavior,
8780 but that causes problems elsewhere. */
8781 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8782 && (code0 == MINUS_EXPR
8783 || code0 == PLUS_EXPR)
8784 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8785 || code0 == INTEGER_CST))
8786 return NULL_TREE;
8788 /* Identify the constant in arg0 and its sign. */
8789 if (code0 == INTEGER_CST)
8790 cst0 = arg0;
8791 else
8792 cst0 = TREE_OPERAND (arg0, 1);
8793 sgn0 = tree_int_cst_sgn (cst0);
8795 /* Overflowed constants and zero will cause problems. */
8796 if (integer_zerop (cst0)
8797 || TREE_OVERFLOW (cst0))
8798 return NULL_TREE;
8800 /* See if we can reduce the magnitude of the constant in
8801 arg0 by changing the comparison code. */
8802 if (code0 == INTEGER_CST)
8804 /* CST <= arg1 -> CST-1 < arg1. */
8805 if (code == LE_EXPR && sgn0 == 1)
8806 code = LT_EXPR;
8807 /* -CST < arg1 -> -CST-1 <= arg1. */
8808 else if (code == LT_EXPR && sgn0 == -1)
8809 code = LE_EXPR;
8810 /* CST > arg1 -> CST-1 >= arg1. */
8811 else if (code == GT_EXPR && sgn0 == 1)
8812 code = GE_EXPR;
8813 /* -CST >= arg1 -> -CST-1 > arg1. */
8814 else if (code == GE_EXPR && sgn0 == -1)
8815 code = GT_EXPR;
8816 else
8817 return NULL_TREE;
8818 /* arg1 code' CST' might be more canonical. */
8819 swap = true;
8821 else
8823 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8824 if (code == LT_EXPR
8825 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8826 code = LE_EXPR;
8827 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8828 else if (code == GT_EXPR
8829 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8830 code = GE_EXPR;
8831 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8832 else if (code == LE_EXPR
8833 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8834 code = LT_EXPR;
8835 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8836 else if (code == GE_EXPR
8837 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8838 code = GT_EXPR;
8839 else
8840 return NULL_TREE;
8841 *strict_overflow_p = true;
8844 /* Now build the constant reduced in magnitude.  But not if that
8845 would produce one outside of its type's range.  */
8846 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8847 && ((sgn0 == 1
8848 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8849 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8850 || (sgn0 == -1
8851 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8852 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8853 /* We cannot swap the comparison here as that would cause us to
8854 endlessly recurse. */
8855 return NULL_TREE;
8857 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8858 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8859 if (code0 != INTEGER_CST)
8860 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8861 t = fold_convert (TREE_TYPE (arg1), t);
8863 /* If swapping might yield a more canonical form, do so.  */
8864 if (swap)
8865 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8866 else
8867 return fold_build2_loc (loc, code, type, t, arg1);
8870 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8871 overflow further. Try to decrease the magnitude of constants involved
8872 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8873 and put sole constants at the second argument position.
8874 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8876 static tree
8877 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8878 tree arg0, tree arg1)
8880 tree t;
8881 bool strict_overflow_p;
8882 const char * const warnmsg = G_("assuming signed overflow does not occur "
8883 "when reducing constant in comparison");
8885 /* Try canonicalization by simplifying arg0. */
8886 strict_overflow_p = false;
8887 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8888 &strict_overflow_p);
8889 if (t)
8891 if (strict_overflow_p)
8892 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8893 return t;
8896 /* Try canonicalization by simplifying arg1 using the swapped
8897 comparison. */
8898 code = swap_tree_comparison (code);
8899 strict_overflow_p = false;
8900 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8901 &strict_overflow_p);
8902 if (t && strict_overflow_p)
8903 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8904 return t;
8907 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8908 space. This is used to avoid issuing overflow warnings for
8909 expressions like &p->x which cannot wrap.  */
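/* Illustration: for &p->x with p pointing to a 16-byte structure and
   a member offset of 8 bytes, TOTAL is 8 and SIZE is 16, so this
   returns false and no spurious overflow warning is issued.  */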
8911 static bool
8912 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8914 double_int di_offset, total;
8916 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8917 return true;
8919 if (bitpos < 0)
8920 return true;
8922 if (offset == NULL_TREE)
8923 di_offset = double_int_zero;
8924 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8925 return true;
8926 else
8927 di_offset = TREE_INT_CST (offset);
8929 bool overflow;
8930 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8931 total = di_offset.add_with_sign (units, true, &overflow);
8932 if (overflow)
8933 return true;
8935 if (total.high != 0)
8936 return true;
8938 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8939 if (size <= 0)
8940 return true;
8942 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8943 array. */
8944 if (TREE_CODE (base) == ADDR_EXPR)
8946 HOST_WIDE_INT base_size;
8948 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8949 if (base_size > 0 && size < base_size)
8950 size = base_size;
8953 return total.low > (unsigned HOST_WIDE_INT) size;
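/* Editorial sketch, not part of GCC: the essence of the check above on
   plain unsigned byte offsets, omitting the ADDR_EXPR refinement.  An
   access stays inside an object of SIZE bytes -- and hence cannot wrap
   the address space -- when OFFSET plus the byte part of BITPOS does
   not exceed SIZE.  Names here are hypothetical.  */
static int
pointer_may_wrap_reference (unsigned long offset,
			    unsigned long bitpos_bytes,
			    unsigned long size)
{
  unsigned long total = offset + bitpos_bytes;
  if (total < offset)		/* the addition itself wrapped */
    return 1;
  return total > size;		/* conservative: outside means "may wrap" */
}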
8956 /* Subroutine of fold_binary. This routine performs all of the
8957 transformations that are common to the equality/inequality
8958 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8959 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8960 fold_binary should call fold_binary. Fold a comparison with
8961 tree code CODE and type TYPE with operands OP0 and OP1. Return
8962 the folded comparison or NULL_TREE. */
8964 static tree
8965 fold_comparison (location_t loc, enum tree_code code, tree type,
8966 tree op0, tree op1)
8968 tree arg0, arg1, tem;
8970 arg0 = op0;
8971 arg1 = op1;
8973 STRIP_SIGN_NOPS (arg0);
8974 STRIP_SIGN_NOPS (arg1);
8976 tem = fold_relational_const (code, type, arg0, arg1);
8977 if (tem != NULL_TREE)
8978 return tem;
8980 /* If one arg is a real or integer constant, put it last. */
8981 if (tree_swap_operands_p (arg0, arg1, true))
8982 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8984 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8985 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8986 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8987 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8988 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8989 && (TREE_CODE (arg1) == INTEGER_CST
8990 && !TREE_OVERFLOW (arg1)))
8992 tree const1 = TREE_OPERAND (arg0, 1);
8993 tree const2 = arg1;
8994 tree variable = TREE_OPERAND (arg0, 0);
8995 tree lhs;
8996 int lhs_add;
8997 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8999 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9000 TREE_TYPE (arg1), const2, const1);
9002 /* If the constant operation overflowed this can be
9003 simplified as a comparison against INT_MAX/INT_MIN. */
9004 if (TREE_CODE (lhs) == INTEGER_CST
9005 && TREE_OVERFLOW (lhs))
9007 int const1_sgn = tree_int_cst_sgn (const1);
9008 enum tree_code code2 = code;
9010 /* Get the sign of the constant on the lhs if the
9011 operation were VARIABLE + CONST1. */
9012 if (TREE_CODE (arg0) == MINUS_EXPR)
9013 const1_sgn = -const1_sgn;
9015 /* The sign of the constant determines if we overflowed
9016 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9017 Canonicalize to the INT_MIN overflow by swapping the comparison
9018 if necessary. */
9019 if (const1_sgn == -1)
9020 code2 = swap_tree_comparison (code);
9022 /* We now can look at the canonicalized case
9023 VARIABLE + 1 CODE2 INT_MIN
9024 and decide on the result. */
9025 if (code2 == LT_EXPR
9026 || code2 == LE_EXPR
9027 || code2 == EQ_EXPR)
9028 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9029 else if (code2 == NE_EXPR
9030 || code2 == GE_EXPR
9031 || code2 == GT_EXPR)
9032 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9035 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9036 && (TREE_CODE (lhs) != INTEGER_CST
9037 || !TREE_OVERFLOW (lhs)))
9039 if (code != EQ_EXPR && code != NE_EXPR)
9040 fold_overflow_warning ("assuming signed overflow does not occur "
9041 "when changing X +- C1 cmp C2 to "
9042 "X cmp C1 +- C2",
9043 WARN_STRICT_OVERFLOW_COMPARISON);
9044 return fold_build2_loc (loc, code, type, variable, lhs);
9048 /* For comparisons of pointers we can decompose it to a compile time
9049 comparison of the base objects and the offsets into the object.
9050 This requires at least one operand being an ADDR_EXPR or a
9051 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9052 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9053 && (TREE_CODE (arg0) == ADDR_EXPR
9054 || TREE_CODE (arg1) == ADDR_EXPR
9055 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9056 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9058 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9059 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9060 enum machine_mode mode;
9061 int volatilep, unsignedp;
9062 bool indirect_base0 = false, indirect_base1 = false;
9064 /* Get base and offset for the access. Strip ADDR_EXPR for
9065 get_inner_reference, but put it back by stripping INDIRECT_REF
9066 off the base object if possible. indirect_baseN will be true
9067 if baseN is not an address but refers to the object itself. */
9068 base0 = arg0;
9069 if (TREE_CODE (arg0) == ADDR_EXPR)
9071 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9072 &bitsize, &bitpos0, &offset0, &mode,
9073 &unsignedp, &volatilep, false);
9074 if (TREE_CODE (base0) == INDIRECT_REF)
9075 base0 = TREE_OPERAND (base0, 0);
9076 else
9077 indirect_base0 = true;
9079 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9081 base0 = TREE_OPERAND (arg0, 0);
9082 STRIP_SIGN_NOPS (base0);
9083 if (TREE_CODE (base0) == ADDR_EXPR)
9085 base0 = TREE_OPERAND (base0, 0);
9086 indirect_base0 = true;
9088 offset0 = TREE_OPERAND (arg0, 1);
9089 if (host_integerp (offset0, 0))
9091 HOST_WIDE_INT off = size_low_cst (offset0);
9092 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9093 * BITS_PER_UNIT)
9094 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9096 bitpos0 = off * BITS_PER_UNIT;
9097 offset0 = NULL_TREE;
9102 base1 = arg1;
9103 if (TREE_CODE (arg1) == ADDR_EXPR)
9105 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9106 &bitsize, &bitpos1, &offset1, &mode,
9107 &unsignedp, &volatilep, false);
9108 if (TREE_CODE (base1) == INDIRECT_REF)
9109 base1 = TREE_OPERAND (base1, 0);
9110 else
9111 indirect_base1 = true;
9113 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9115 base1 = TREE_OPERAND (arg1, 0);
9116 STRIP_SIGN_NOPS (base1);
9117 if (TREE_CODE (base1) == ADDR_EXPR)
9119 base1 = TREE_OPERAND (base1, 0);
9120 indirect_base1 = true;
9122 offset1 = TREE_OPERAND (arg1, 1);
9123 if (host_integerp (offset1, 0))
9125 HOST_WIDE_INT off = size_low_cst (offset1);
9126 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9127 * BITS_PER_UNIT)
9128 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9130 bitpos1 = off * BITS_PER_UNIT;
9131 offset1 = NULL_TREE;
9136 /* A local variable can never be pointed to by
9137 the default SSA name of an incoming parameter. */
9138 if ((TREE_CODE (arg0) == ADDR_EXPR
9139 && indirect_base0
9140 && TREE_CODE (base0) == VAR_DECL
9141 && auto_var_in_fn_p (base0, current_function_decl)
9142 && !indirect_base1
9143 && TREE_CODE (base1) == SSA_NAME
9144 && SSA_NAME_IS_DEFAULT_DEF (base1)
9145 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9146 || (TREE_CODE (arg1) == ADDR_EXPR
9147 && indirect_base1
9148 && TREE_CODE (base1) == VAR_DECL
9149 && auto_var_in_fn_p (base1, current_function_decl)
9150 && !indirect_base0
9151 && TREE_CODE (base0) == SSA_NAME
9152 && SSA_NAME_IS_DEFAULT_DEF (base0)
9153 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9155 if (code == NE_EXPR)
9156 return constant_boolean_node (1, type);
9157 else if (code == EQ_EXPR)
9158 return constant_boolean_node (0, type);
9160 /* If we have equivalent bases we might be able to simplify. */
9161 else if (indirect_base0 == indirect_base1
9162 && operand_equal_p (base0, base1, 0))
9164 /* We can fold this expression to a constant if the non-constant
9165 offset parts are equal. */
9166 if ((offset0 == offset1
9167 || (offset0 && offset1
9168 && operand_equal_p (offset0, offset1, 0)))
9169 && (code == EQ_EXPR
9170 || code == NE_EXPR
9171 || (indirect_base0 && DECL_P (base0))
9172 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9175 if (code != EQ_EXPR
9176 && code != NE_EXPR
9177 && bitpos0 != bitpos1
9178 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9179 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9180 fold_overflow_warning (("assuming pointer wraparound does not "
9181 "occur when comparing P +- C1 with "
9182 "P +- C2"),
9183 WARN_STRICT_OVERFLOW_CONDITIONAL);
9185 switch (code)
9187 case EQ_EXPR:
9188 return constant_boolean_node (bitpos0 == bitpos1, type);
9189 case NE_EXPR:
9190 return constant_boolean_node (bitpos0 != bitpos1, type);
9191 case LT_EXPR:
9192 return constant_boolean_node (bitpos0 < bitpos1, type);
9193 case LE_EXPR:
9194 return constant_boolean_node (bitpos0 <= bitpos1, type);
9195 case GE_EXPR:
9196 return constant_boolean_node (bitpos0 >= bitpos1, type);
9197 case GT_EXPR:
9198 return constant_boolean_node (bitpos0 > bitpos1, type);
9199 default:;
9202 /* We can simplify the comparison to a comparison of the variable
9203 offset parts if the constant offset parts are equal.
9204 Be careful to use signed sizetype here because otherwise we
9205 mess with array offsets in the wrong way. This is possible
9206 because pointer arithmetic is restricted to remain within an
9207 object and overflow on pointer differences is undefined as of
9208 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9209 else if (bitpos0 == bitpos1
9210 && ((code == EQ_EXPR || code == NE_EXPR)
9211 || (indirect_base0 && DECL_P (base0))
9212 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9214 /* By converting to signed sizetype we cover middle-end pointer
9215 arithmetic which operates on unsigned pointer types of size
9216 type size and ARRAY_REF offsets which are properly sign or
9217 zero extended from their type in case it is narrower than
9218 sizetype. */
9219 if (offset0 == NULL_TREE)
9220 offset0 = build_int_cst (ssizetype, 0);
9221 else
9222 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9223 if (offset1 == NULL_TREE)
9224 offset1 = build_int_cst (ssizetype, 0);
9225 else
9226 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9228 if (code != EQ_EXPR
9229 && code != NE_EXPR
9230 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9231 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9232 fold_overflow_warning (("assuming pointer wraparound does not "
9233 "occur when comparing P +- C1 with "
9234 "P +- C2"),
9235 WARN_STRICT_OVERFLOW_COMPARISON);
9237 return fold_build2_loc (loc, code, type, offset0, offset1);
9240 /* For non-equal bases we can simplify if they are addresses
9241 of local binding decls or constants. */
9242 else if (indirect_base0 && indirect_base1
9243 /* We know that !operand_equal_p (base0, base1, 0)
9244 because the if condition was false. But make
9245 sure two decls are not the same. */
9246 && base0 != base1
9247 && TREE_CODE (arg0) == ADDR_EXPR
9248 && TREE_CODE (arg1) == ADDR_EXPR
9249 && (((TREE_CODE (base0) == VAR_DECL
9250 || TREE_CODE (base0) == PARM_DECL)
9251 && (targetm.binds_local_p (base0)
9252 || CONSTANT_CLASS_P (base1)))
9253 || CONSTANT_CLASS_P (base0))
9254 && (((TREE_CODE (base1) == VAR_DECL
9255 || TREE_CODE (base1) == PARM_DECL)
9256 && (targetm.binds_local_p (base1)
9257 || CONSTANT_CLASS_P (base0)))
9258 || CONSTANT_CLASS_P (base1)))
9260 if (code == EQ_EXPR)
9261 return omit_two_operands_loc (loc, type, boolean_false_node,
9262 arg0, arg1);
9263 else if (code == NE_EXPR)
9264 return omit_two_operands_loc (loc, type, boolean_true_node,
9265 arg0, arg1);
9267 /* For equal offsets we can simplify to a comparison of the
9268 base addresses. */
9269 else if (bitpos0 == bitpos1
9270 && (indirect_base0
9271 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9272 && (indirect_base1
9273 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9274 && ((offset0 == offset1)
9275 || (offset0 && offset1
9276 && operand_equal_p (offset0, offset1, 0))))
9278 if (indirect_base0)
9279 base0 = build_fold_addr_expr_loc (loc, base0);
9280 if (indirect_base1)
9281 base1 = build_fold_addr_expr_loc (loc, base1);
9282 return fold_build2_loc (loc, code, type, base0, base1);
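/* Editorial example, a sketch assuming 4-byte int: for
     int a[10];  ...  &a[2] < &a[5]
   both operands decompose to the same base A with bit positions
   64 and 160, so the comparison folds to constant true by the
   bitpos comparison above.  */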
9286 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9287 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9288 the resulting offset is smaller in absolute value than the
9289 original one. */
9290 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9291 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9292 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9293 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9294 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9295 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9296 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9298 tree const1 = TREE_OPERAND (arg0, 1);
9299 tree const2 = TREE_OPERAND (arg1, 1);
9300 tree variable1 = TREE_OPERAND (arg0, 0);
9301 tree variable2 = TREE_OPERAND (arg1, 0);
9302 tree cst;
9303 const char * const warnmsg = G_("assuming signed overflow does not "
9304 "occur when combining constants around "
9305 "a comparison");
9307 /* Put the constant on the side where it doesn't overflow and is
9308 of lower absolute value than before. */
9309 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9310 ? MINUS_EXPR : PLUS_EXPR,
9311 const2, const1);
9312 if (!TREE_OVERFLOW (cst)
9313 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9315 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9316 return fold_build2_loc (loc, code, type,
9317 variable1,
9318 fold_build2_loc (loc,
9319 TREE_CODE (arg1), TREE_TYPE (arg1),
9320 variable2, cst));
9323 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9324 ? MINUS_EXPR : PLUS_EXPR,
9325 const1, const2);
9326 if (!TREE_OVERFLOW (cst)
9327 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9329 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9330 return fold_build2_loc (loc, code, type,
9331 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9332 variable1, cst),
9333 variable2);
9337 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9338 signed arithmetic case. That form is created by the compiler
9339 often enough for folding it to be of value. One example is in
9340 computing loop trip counts after Operator Strength Reduction. */
9341 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9342 && TREE_CODE (arg0) == MULT_EXPR
9343 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9344 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9345 && integer_zerop (arg1))
9347 tree const1 = TREE_OPERAND (arg0, 1);
9348 tree const2 = arg1; /* zero */
9349 tree variable1 = TREE_OPERAND (arg0, 0);
9350 enum tree_code cmp_code = code;
9352 /* Handle unfolded multiplication by zero. */
9353 if (integer_zerop (const1))
9354 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9356 fold_overflow_warning (("assuming signed overflow does not occur when "
9357 "eliminating multiplication in comparison "
9358 "with zero"),
9359 WARN_STRICT_OVERFLOW_COMPARISON);
9361 /* If const1 is negative we swap the sense of the comparison. */
9362 if (tree_int_cst_sgn (const1) < 0)
9363 cmp_code = swap_tree_comparison (cmp_code);
9365 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
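/* Editorial example (sketch): with signed x and undefined overflow,
   x * 4 > 0 folds to x > 0 here, while x * -4 > 0 becomes x < 0
   via the sign swap just above.  */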
9368 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9369 if (tem)
9370 return tem;
9372 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9374 tree targ0 = strip_float_extensions (arg0);
9375 tree targ1 = strip_float_extensions (arg1);
9376 tree newtype = TREE_TYPE (targ0);
9378 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9379 newtype = TREE_TYPE (targ1);
9381 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9382 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9383 return fold_build2_loc (loc, code, type,
9384 fold_convert_loc (loc, newtype, targ0),
9385 fold_convert_loc (loc, newtype, targ1));
9387 /* (-a) CMP (-b) -> b CMP a */
9388 if (TREE_CODE (arg0) == NEGATE_EXPR
9389 && TREE_CODE (arg1) == NEGATE_EXPR)
9390 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9391 TREE_OPERAND (arg0, 0));
9393 if (TREE_CODE (arg1) == REAL_CST)
9395 REAL_VALUE_TYPE cst;
9396 cst = TREE_REAL_CST (arg1);
9398 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9399 if (TREE_CODE (arg0) == NEGATE_EXPR)
9400 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9401 TREE_OPERAND (arg0, 0),
9402 build_real (TREE_TYPE (arg1),
9403 real_value_negate (&cst)));
9405 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9406 /* a CMP (-0) -> a CMP 0 */
9407 if (REAL_VALUE_MINUS_ZERO (cst))
9408 return fold_build2_loc (loc, code, type, arg0,
9409 build_real (TREE_TYPE (arg1), dconst0));
9411 /* x != NaN is always true, other ops are always false. */
9412 if (REAL_VALUE_ISNAN (cst)
9413 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9415 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9416 return omit_one_operand_loc (loc, type, tem, arg0);
9419 /* Fold comparisons against infinity. */
9420 if (REAL_VALUE_ISINF (cst)
9421 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9423 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9424 if (tem != NULL_TREE)
9425 return tem;
9429 /* If this is a comparison of a real constant with a PLUS_EXPR
9430 or a MINUS_EXPR of a real constant, we can convert it into a
9431 comparison with a revised real constant as long as no overflow
9432 occurs when unsafe_math_optimizations are enabled. */
9433 if (flag_unsafe_math_optimizations
9434 && TREE_CODE (arg1) == REAL_CST
9435 && (TREE_CODE (arg0) == PLUS_EXPR
9436 || TREE_CODE (arg0) == MINUS_EXPR)
9437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9438 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9439 ? MINUS_EXPR : PLUS_EXPR,
9440 arg1, TREE_OPERAND (arg0, 1)))
9441 && !TREE_OVERFLOW (tem))
9442 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9444 /* Likewise, we can simplify a comparison of a real constant with
9445 a MINUS_EXPR whose first operand is also a real constant, i.e.
9446 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9447 floating-point types only if -fassociative-math is set. */
9448 if (flag_associative_math
9449 && TREE_CODE (arg1) == REAL_CST
9450 && TREE_CODE (arg0) == MINUS_EXPR
9451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9452 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9453 arg1))
9454 && !TREE_OVERFLOW (tem))
9455 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9456 TREE_OPERAND (arg0, 1), tem);
9458 /* Fold comparisons against built-in math functions. */
9459 if (TREE_CODE (arg1) == REAL_CST
9460 && flag_unsafe_math_optimizations
9461 && ! flag_errno_math)
9463 enum built_in_function fcode = builtin_mathfn_code (arg0);
9465 if (fcode != END_BUILTINS)
9467 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9468 if (tem != NULL_TREE)
9469 return tem;
9474 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9475 && CONVERT_EXPR_P (arg0))
9477 /* If we are widening one operand of an integer comparison,
9478 see if the other operand is similarly being widened. Perhaps we
9479 can do the comparison in the narrower type. */
9480 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9481 if (tem)
9482 return tem;
9484 /* Or if we are changing signedness. */
9485 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9486 if (tem)
9487 return tem;
9490 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9491 constant, we can simplify it. */
9492 if (TREE_CODE (arg1) == INTEGER_CST
9493 && (TREE_CODE (arg0) == MIN_EXPR
9494 || TREE_CODE (arg0) == MAX_EXPR)
9495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9497 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9498 if (tem)
9499 return tem;
9502 /* Simplify comparison of something with itself. (For IEEE
9503 floating-point, we can only do some of these simplifications.) */
9504 if (operand_equal_p (arg0, arg1, 0))
9506 switch (code)
9508 case EQ_EXPR:
9509 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9510 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9511 return constant_boolean_node (1, type);
9512 break;
9514 case GE_EXPR:
9515 case LE_EXPR:
9516 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9517 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9518 return constant_boolean_node (1, type);
9519 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9521 case NE_EXPR:
9522 /* For NE, we can only do this simplification if the operands
9523 are integral or we don't honor IEEE floating point NaNs. */
9524 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9525 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9526 break;
9527 /* ... fall through ... */
9528 case GT_EXPR:
9529 case LT_EXPR:
9530 return constant_boolean_node (0, type);
9531 default:
9532 gcc_unreachable ();
9536 /* If we are comparing an expression that just has comparisons
9537 of two integer values, arithmetic expressions of those comparisons,
9538 and constants, we can simplify it. There are only three cases
9539 to check: the two values can either be equal, the first can be
9540 greater, or the second can be greater. Fold the expression for
9541 those three values. Since each value must be 0 or 1, we have
9542 eight possibilities, each of which corresponds to the constant 0
9543 or 1 or one of the six possible comparisons.
9545 This handles common cases like (a > b) == 0 but also handles
9546 expressions like ((x > y) - (y > x)) > 0, which supposedly
9547 occur in macroized code. */
9549 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9551 tree cval1 = 0, cval2 = 0;
9552 int save_p = 0;
9554 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9555 /* Don't handle degenerate cases here; they should already
9556 have been handled anyway. */
9557 && cval1 != 0 && cval2 != 0
9558 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9559 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9560 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9561 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9562 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9563 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9564 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9566 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9567 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9569 /* We can't just pass T to eval_subst in case cval1 or cval2
9570 was the same as ARG1. */
9572 tree high_result
9573 = fold_build2_loc (loc, code, type,
9574 eval_subst (loc, arg0, cval1, maxval,
9575 cval2, minval),
9576 arg1);
9577 tree equal_result
9578 = fold_build2_loc (loc, code, type,
9579 eval_subst (loc, arg0, cval1, maxval,
9580 cval2, maxval),
9581 arg1);
9582 tree low_result
9583 = fold_build2_loc (loc, code, type,
9584 eval_subst (loc, arg0, cval1, minval,
9585 cval2, maxval),
9586 arg1);
9588 /* All three of these results should be 0 or 1. Confirm they are.
9589 Then use those values to select the proper code to use. */
9591 if (TREE_CODE (high_result) == INTEGER_CST
9592 && TREE_CODE (equal_result) == INTEGER_CST
9593 && TREE_CODE (low_result) == INTEGER_CST)
9595 /* Make a 3-bit mask with the high-order bit being the
9596 value for `>', the next for `=', and the low for `<'. */
9597 switch ((integer_onep (high_result) * 4)
9598 + (integer_onep (equal_result) * 2)
9599 + integer_onep (low_result))
9601 case 0:
9602 /* Always false. */
9603 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9604 case 1:
9605 code = LT_EXPR;
9606 break;
9607 case 2:
9608 code = EQ_EXPR;
9609 break;
9610 case 3:
9611 code = LE_EXPR;
9612 break;
9613 case 4:
9614 code = GT_EXPR;
9615 break;
9616 case 5:
9617 code = NE_EXPR;
9618 break;
9619 case 6:
9620 code = GE_EXPR;
9621 break;
9622 case 7:
9623 /* Always true. */
9624 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9627 if (save_p)
9629 tem = save_expr (build2 (code, type, cval1, cval2));
9630 SET_EXPR_LOCATION (tem, loc);
9631 return tem;
9633 return fold_build2_loc (loc, code, type, cval1, cval2);
9638 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9639 into a single range test. */
9640 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9641 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9642 && TREE_CODE (arg1) == INTEGER_CST
9643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9644 && !integer_zerop (TREE_OPERAND (arg0, 1))
9645 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9646 && !TREE_OVERFLOW (arg1))
9648 tem = fold_div_compare (loc, code, type, arg0, arg1);
9649 if (tem != NULL_TREE)
9650 return tem;
9653 /* Fold ~X op ~Y as Y op X. */
9654 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9655 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9657 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9658 return fold_build2_loc (loc, code, type,
9659 fold_convert_loc (loc, cmp_type,
9660 TREE_OPERAND (arg1, 0)),
9661 TREE_OPERAND (arg0, 0));
9664 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9666 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9668 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9669 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9670 TREE_OPERAND (arg0, 0),
9671 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9672 fold_convert_loc (loc, cmp_type, arg1)));
9675 return NULL_TREE;
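/* Editorial sketch, not part of GCC: the last two identities above,
   spelled out on unsigned int with a hypothetical checker.  ~X < ~Y
   iff Y < X, and ~X < C iff X > ~C (the swapped comparison).  */
static int
bit_not_compare_examples (unsigned int x, unsigned int y, unsigned int c)
{
  int ok1 = (~x < ~y) == (y < x);	/* ~X op ~Y  ->  Y op X */
  int ok2 = (~x < c) == (x > ~c);	/* ~X op C   ->  X op' ~C */
  return ok1 && ok2;
}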
9679 /* Subroutine of fold_binary. Optimize complex multiplications of the
9680 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9681 argument EXPR represents the expression "z" of type TYPE. */
9683 static tree
9684 fold_mult_zconjz (location_t loc, tree type, tree expr)
9686 tree itype = TREE_TYPE (type);
9687 tree rpart, ipart, tem;
9689 if (TREE_CODE (expr) == COMPLEX_EXPR)
9691 rpart = TREE_OPERAND (expr, 0);
9692 ipart = TREE_OPERAND (expr, 1);
9694 else if (TREE_CODE (expr) == COMPLEX_CST)
9696 rpart = TREE_REALPART (expr);
9697 ipart = TREE_IMAGPART (expr);
9699 else
9701 expr = save_expr (expr);
9702 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9703 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9706 rpart = save_expr (rpart);
9707 ipart = save_expr (ipart);
9708 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9709 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9710 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9711 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9712 build_zero_cst (itype));
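/* Editorial sketch, not part of GCC: what fold_mult_zconjz computes,
   written out with GNU C complex arithmetic for reference.  */
static double _Complex
mult_zconjz_reference (double _Complex z)
{
  double r = __real__ z, i = __imag__ z;
  /* z * conj (z) == r*r + i*i, with a zero imaginary part.  */
  return r * r + i * i;
}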
9716 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9717 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9718 guarantees that P and N have the same least significant log2(M) bits.
9719 N is not otherwise constrained. In particular, N is not normalized to
9720 0 <= N < M as is common. In general, the precise value of P is unknown.
9721 M is chosen as large as possible such that constant N can be determined.
9723 Returns M and sets *RESIDUE to N.
9725 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9726 account. This is not always possible due to PR 35705.
9729 static unsigned HOST_WIDE_INT
9730 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9731 bool allow_func_align)
9733 enum tree_code code;
9735 *residue = 0;
9737 code = TREE_CODE (expr);
9738 if (code == ADDR_EXPR)
9740 unsigned int bitalign;
9741 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9742 *residue /= BITS_PER_UNIT;
9743 return bitalign / BITS_PER_UNIT;
9745 else if (code == POINTER_PLUS_EXPR)
9747 tree op0, op1;
9748 unsigned HOST_WIDE_INT modulus;
9749 enum tree_code inner_code;
9751 op0 = TREE_OPERAND (expr, 0);
9752 STRIP_NOPS (op0);
9753 modulus = get_pointer_modulus_and_residue (op0, residue,
9754 allow_func_align);
9756 op1 = TREE_OPERAND (expr, 1);
9757 STRIP_NOPS (op1);
9758 inner_code = TREE_CODE (op1);
9759 if (inner_code == INTEGER_CST)
9761 *residue += TREE_INT_CST_LOW (op1);
9762 return modulus;
9764 else if (inner_code == MULT_EXPR)
9766 op1 = TREE_OPERAND (op1, 1);
9767 if (TREE_CODE (op1) == INTEGER_CST)
9769 unsigned HOST_WIDE_INT align;
9771 /* Compute the greatest power-of-2 divisor of op1. */
9772 align = TREE_INT_CST_LOW (op1);
9773 align &= -align;
9775 /* If align is non-zero and less than modulus, replace
9776 modulus with align. If align is 0, then either op1 is 0
9777 or the greatest power-of-2 divisor of op1 doesn't fit in an
9778 unsigned HOST_WIDE_INT. In either case, no additional
9779 constraint is imposed. */
9780 if (align)
9781 modulus = MIN (modulus, align);
9783 return modulus;
9788 /* If we get here, we were unable to determine anything useful about the
9789 expression. */
9790 return 1;
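/* Editorial sketch, not part of GCC: the "greatest power-of-2 divisor"
   trick used above (align &= -align) isolates the lowest set bit,
   e.g. 24 -> 8, 40 -> 8, 0 -> 0.  */
static unsigned long
greatest_pow2_divisor_example (unsigned long x)
{
  return x & -x;
}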
9793 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9794 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9796 static bool
9797 vec_cst_ctor_to_array (tree arg, tree *elts)
9799 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9801 if (TREE_CODE (arg) == VECTOR_CST)
9803 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9804 elts[i] = VECTOR_CST_ELT (arg, i);
9806 else if (TREE_CODE (arg) == CONSTRUCTOR)
9808 constructor_elt *elt;
9810 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9811 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9812 return false;
9813 else
9814 elts[i] = elt->value;
9816 else
9817 return false;
9818 for (; i < nelts; i++)
9819 elts[i]
9820 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9821 return true;
9824 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9825 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9826 NULL_TREE otherwise. */
9828 static tree
9829 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9831 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9832 tree *elts;
9833 bool need_ctor = false;
9835 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9836 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9837 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9838 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9839 return NULL_TREE;
9841 elts = XALLOCAVEC (tree, nelts * 3);
9842 if (!vec_cst_ctor_to_array (arg0, elts)
9843 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9844 return NULL_TREE;
9846 for (i = 0; i < nelts; i++)
9848 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9849 need_ctor = true;
9850 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9853 if (need_ctor)
9855 vec<constructor_elt, va_gc> *v;
9856 vec_alloc (v, nelts);
9857 for (i = 0; i < nelts; i++)
9858 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9859 return build_constructor (type, v);
9861 else
9862 return build_vector (type, &elts[2 * nelts]);
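/* Editorial sketch, not part of GCC: the selection rule fold_vec_perm
   implements, restated on plain arrays.  Each SEL entry is assumed to
   lie in [0, 2*N) and indexes the concatenation of the two inputs.  */
static void
vec_perm_reference (const int *arg0, const int *arg1,
		    const unsigned char *sel, int *out, unsigned int n)
{
  unsigned int i;
  for (i = 0; i < n; i++)
    out[i] = sel[i] < n ? arg0[sel[i]] : arg1[sel[i] - n];
}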
9865 /* Try to fold a pointer difference of type TYPE between two address expressions of
9866 array references AREF0 and AREF1 using location LOC. Return a
9867 simplified expression for the difference or NULL_TREE. */
9869 static tree
9870 fold_addr_of_array_ref_difference (location_t loc, tree type,
9871 tree aref0, tree aref1)
9873 tree base0 = TREE_OPERAND (aref0, 0);
9874 tree base1 = TREE_OPERAND (aref1, 0);
9875 tree base_offset = build_int_cst (type, 0);
9877 /* If the bases are array references as well, recurse. If the bases
9878 are pointer indirections compute the difference of the pointers.
9879 If the bases are equal, we are set. */
9880 if ((TREE_CODE (base0) == ARRAY_REF
9881 && TREE_CODE (base1) == ARRAY_REF
9882 && (base_offset
9883 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9884 || (INDIRECT_REF_P (base0)
9885 && INDIRECT_REF_P (base1)
9886 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9887 TREE_OPERAND (base0, 0),
9888 TREE_OPERAND (base1, 0))))
9889 || operand_equal_p (base0, base1, 0))
9891 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9892 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9893 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9894 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9895 return fold_build2_loc (loc, PLUS_EXPR, type,
9896 base_offset,
9897 fold_build2_loc (loc, MULT_EXPR, type,
9898 diff, esz));
9900 return NULL_TREE;
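/* Editorial sketch, not part of GCC: for the innermost case
   &a[i] - &a[j] the folder above yields (i - j) * esz, with any
   recursively computed base offset added on top.  */
static long
array_ref_difference_example (long i, long j, long esz, long base_offset)
{
  return base_offset + (i - j) * esz;
}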
9903 /* If the real or vector real constant CST of type TYPE has an exact
9904 inverse, return it, else return NULL. */
9906 static tree
9907 exact_inverse (tree type, tree cst)
9909 REAL_VALUE_TYPE r;
9910 tree unit_type, *elts;
9911 enum machine_mode mode;
9912 unsigned vec_nelts, i;
9914 switch (TREE_CODE (cst))
9916 case REAL_CST:
9917 r = TREE_REAL_CST (cst);
9919 if (exact_real_inverse (TYPE_MODE (type), &r))
9920 return build_real (type, r);
9922 return NULL_TREE;
9924 case VECTOR_CST:
9925 vec_nelts = VECTOR_CST_NELTS (cst);
9926 elts = XALLOCAVEC (tree, vec_nelts);
9927 unit_type = TREE_TYPE (type);
9928 mode = TYPE_MODE (unit_type);
9930 for (i = 0; i < vec_nelts; i++)
9932 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9933 if (!exact_real_inverse (mode, &r))
9934 return NULL_TREE;
9935 elts[i] = build_real (unit_type, r);
9938 return build_vector (type, elts);
9940 default:
9941 return NULL_TREE;
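/* Editorial note, a sketch: in binary floating point a reciprocal is
   exactly representable precisely when the constant is a power of two
   (within range), which is the condition exact_real_inverse tests.
   For example 1.0/0.25 == 4.0 exactly, so x / 0.25 may become x * 4.0,
   while 1.0/3.0 must round and no exact constant exists.  */
static double
exact_inverse_example (void)
{
  return 1.0 / 0.25;	/* == 4.0, with no rounding */
}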
9945 /* Mask out the tz least significant bits of X of type TYPE where
9946 tz is the number of trailing zeroes in Y. */
9947 static double_int
9948 mask_with_tz (tree type, double_int x, double_int y)
9950 int tz = y.trailing_zeros ();
9952 if (tz > 0)
9954 double_int mask;
9956 mask = ~double_int::mask (tz);
9957 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9958 return mask & x;
9960 return x;
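/* Editorial sketch, not part of GCC: the same masking on unsigned int.
   If Y ends in TZ zero bits, the low TZ bits of X can never survive
   X & Y, so they may be cleared up front.  */
static unsigned int
mask_with_tz_reference (unsigned int x, unsigned int y)
{
  if (y == 0)
    return x;			/* no trailing-zero information */
  int tz = __builtin_ctz (y);	/* trailing zero bits of y */
  return x & ~((1u << tz) - 1);	/* clear the low tz bits of x */
}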
9963 /* Fold a binary expression of code CODE and type TYPE with operands
9964 OP0 and OP1. LOC is the location of the resulting expression.
9965 Return the folded expression if folding is successful. Otherwise,
9966 return NULL_TREE. */
9968 tree
9969 fold_binary_loc (location_t loc,
9970 enum tree_code code, tree type, tree op0, tree op1)
9972 enum tree_code_class kind = TREE_CODE_CLASS (code);
9973 tree arg0, arg1, tem;
9974 tree t1 = NULL_TREE;
9975 bool strict_overflow_p;
9976 unsigned int prec;
9978 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9979 && TREE_CODE_LENGTH (code) == 2
9980 && op0 != NULL_TREE
9981 && op1 != NULL_TREE);
9983 arg0 = op0;
9984 arg1 = op1;
9986 /* Strip any conversions that don't change the mode. This is
9987 safe for every expression, except for a comparison expression
9988 because its signedness is derived from its operands. So, in
9989 the latter case, only strip conversions that don't change the
9990 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9991 preserved.
9993 Note that this is done as an internal manipulation within the
9994 constant folder, in order to find the simplest representation
9995 of the arguments so that their form can be studied. In any
9996 cases, the appropriate type conversions should be put back in
9997 the tree that will get out of the constant folder. */
9999 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10001 STRIP_SIGN_NOPS (arg0);
10002 STRIP_SIGN_NOPS (arg1);
10004 else
10006 STRIP_NOPS (arg0);
10007 STRIP_NOPS (arg1);
10010 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10011 constant but we can't do arithmetic on them. */
10012 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10013 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10014 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10015 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10016 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10017 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10018 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10020 if (kind == tcc_binary)
10022 /* Make sure type and arg0 have the same saturating flag. */
10023 gcc_assert (TYPE_SATURATING (type)
10024 == TYPE_SATURATING (TREE_TYPE (arg0)));
10025 tem = const_binop (code, arg0, arg1);
10027 else if (kind == tcc_comparison)
10028 tem = fold_relational_const (code, type, arg0, arg1);
10029 else
10030 tem = NULL_TREE;
10032 if (tem != NULL_TREE)
10034 if (TREE_TYPE (tem) != type)
10035 tem = fold_convert_loc (loc, type, tem);
10036 return tem;
10040 /* If this is a commutative operation, and ARG0 is a constant, move it
10041 to ARG1 to reduce the number of tests below. */
10042 if (commutative_tree_code (code)
10043 && tree_swap_operands_p (arg0, arg1, true))
10044 return fold_build2_loc (loc, code, type, op1, op0);
10046 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10048 First check for cases where an arithmetic operation is applied to a
10049 compound, conditional, or comparison operation. Push the arithmetic
10050 operation inside the compound or conditional to see if any folding
10051 can then be done. Convert comparison to conditional for this purpose.
10052 This also optimizes non-constant cases that used to be done in
10053 expand_expr.
10055 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10056 one of the operands is a comparison and the other is a comparison, a
10057 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10058 code below would make the expression more complex. Change it to a
10059 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10060 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10062 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10063 || code == EQ_EXPR || code == NE_EXPR)
10064 && TREE_CODE (type) != VECTOR_TYPE
10065 && ((truth_value_p (TREE_CODE (arg0))
10066 && (truth_value_p (TREE_CODE (arg1))
10067 || (TREE_CODE (arg1) == BIT_AND_EXPR
10068 && integer_onep (TREE_OPERAND (arg1, 1)))))
10069 || (truth_value_p (TREE_CODE (arg1))
10070 && (truth_value_p (TREE_CODE (arg0))
10071 || (TREE_CODE (arg0) == BIT_AND_EXPR
10072 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10074 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10075 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10076 : TRUTH_XOR_EXPR,
10077 boolean_type_node,
10078 fold_convert_loc (loc, boolean_type_node, arg0),
10079 fold_convert_loc (loc, boolean_type_node, arg1));
10081 if (code == EQ_EXPR)
10082 tem = invert_truthvalue_loc (loc, tem);
10084 return fold_convert_loc (loc, type, tem);
10087 if (TREE_CODE_CLASS (code) == tcc_binary
10088 || TREE_CODE_CLASS (code) == tcc_comparison)
10090 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10092 tem = fold_build2_loc (loc, code, type,
10093 fold_convert_loc (loc, TREE_TYPE (op0),
10094 TREE_OPERAND (arg0, 1)), op1);
10095 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10096 tem);
10098 if (TREE_CODE (arg1) == COMPOUND_EXPR
10099 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10101 tem = fold_build2_loc (loc, code, type, op0,
10102 fold_convert_loc (loc, TREE_TYPE (op1),
10103 TREE_OPERAND (arg1, 1)));
10104 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10105 tem);
10108 if (TREE_CODE (arg0) == COND_EXPR
10109 || TREE_CODE (arg0) == VEC_COND_EXPR
10110 || COMPARISON_CLASS_P (arg0))
10112 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10113 arg0, arg1,
10114 /*cond_first_p=*/1);
10115 if (tem != NULL_TREE)
10116 return tem;
10119 if (TREE_CODE (arg1) == COND_EXPR
10120 || TREE_CODE (arg1) == VEC_COND_EXPR
10121 || COMPARISON_CLASS_P (arg1))
10123 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10124 arg1, arg0,
10125 /*cond_first_p=*/0);
10126 if (tem != NULL_TREE)
10127 return tem;
10131 switch (code)
10133 case MEM_REF:
10134 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10135 if (TREE_CODE (arg0) == ADDR_EXPR
10136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10138 tree iref = TREE_OPERAND (arg0, 0);
10139 return fold_build2 (MEM_REF, type,
10140 TREE_OPERAND (iref, 0),
10141 int_const_binop (PLUS_EXPR, arg1,
10142 TREE_OPERAND (iref, 1)));
10145 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10146 if (TREE_CODE (arg0) == ADDR_EXPR
10147 && handled_component_p (TREE_OPERAND (arg0, 0)))
10149 tree base;
10150 HOST_WIDE_INT coffset;
10151 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10152 &coffset);
10153 if (!base)
10154 return NULL_TREE;
10155 return fold_build2 (MEM_REF, type,
10156 build_fold_addr_expr (base),
10157 int_const_binop (PLUS_EXPR, arg1,
10158 size_int (coffset)));
10161 return NULL_TREE;
10163 case POINTER_PLUS_EXPR:
10164 /* 0 +p index -> (type)index */
10165 if (integer_zerop (arg0))
10166 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10168 /* PTR +p 0 -> PTR */
10169 if (integer_zerop (arg1))
10170 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10172 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10173 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10174 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10175 return fold_convert_loc (loc, type,
10176 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10177 fold_convert_loc (loc, sizetype,
10178 arg1),
10179 fold_convert_loc (loc, sizetype,
10180 arg0)));
10182 /* (PTR +p B) +p A -> PTR +p (B + A) */
10183 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10185 tree inner;
10186 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10187 tree arg00 = TREE_OPERAND (arg0, 0);
10188 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10189 arg01, fold_convert_loc (loc, sizetype, arg1));
10190 return fold_convert_loc (loc, type,
10191 fold_build_pointer_plus_loc (loc,
10192 arg00, inner));
10195 /* PTR_CST +p CST -> CST1 */
10196 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10197 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10198 fold_convert_loc (loc, type, arg1));
10200 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10201 of the array. The loop optimizer sometimes produces this type of
10202 expression. */
10203 if (TREE_CODE (arg0) == ADDR_EXPR)
10205 tem = try_move_mult_to_index (loc, arg0,
10206 fold_convert_loc (loc,
10207 ssizetype, arg1));
10208 if (tem)
10209 return fold_convert_loc (loc, type, tem);
10212 return NULL_TREE;
10214 case PLUS_EXPR:
10215 /* A + (-B) -> A - B */
10216 if (TREE_CODE (arg1) == NEGATE_EXPR)
10217 return fold_build2_loc (loc, MINUS_EXPR, type,
10218 fold_convert_loc (loc, type, arg0),
10219 fold_convert_loc (loc, type,
10220 TREE_OPERAND (arg1, 0)));
10221 /* (-A) + B -> B - A */
10222 if (TREE_CODE (arg0) == NEGATE_EXPR
10223 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10224 return fold_build2_loc (loc, MINUS_EXPR, type,
10225 fold_convert_loc (loc, type, arg1),
10226 fold_convert_loc (loc, type,
10227 TREE_OPERAND (arg0, 0)));
10229 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10231 /* Convert ~A + 1 to -A. */
10232 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10233 && integer_onep (arg1))
10234 return fold_build1_loc (loc, NEGATE_EXPR, type,
10235 fold_convert_loc (loc, type,
10236 TREE_OPERAND (arg0, 0)));
10238 /* ~X + X is -1. */
10239 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10240 && !TYPE_OVERFLOW_TRAPS (type))
10242 tree tem = TREE_OPERAND (arg0, 0);
10244 STRIP_NOPS (tem);
10245 if (operand_equal_p (tem, arg1, 0))
10247 t1 = build_all_ones_cst (type);
10248 return omit_one_operand_loc (loc, type, t1, arg1);
10252 /* X + ~X is -1. */
10253 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10254 && !TYPE_OVERFLOW_TRAPS (type))
10256 tree tem = TREE_OPERAND (arg1, 0);
10258 STRIP_NOPS (tem);
10259 if (operand_equal_p (arg0, tem, 0))
10261 t1 = build_all_ones_cst (type);
10262 return omit_one_operand_loc (loc, type, t1, arg0);
10266 /* X + (X / CST) * -CST is X % CST. */
10267 if (TREE_CODE (arg1) == MULT_EXPR
10268 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10269 && operand_equal_p (arg0,
10270 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10272 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10273 tree cst1 = TREE_OPERAND (arg1, 1);
10274 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10275 cst1, cst0);
10276 if (sum && integer_zerop (sum))
10277 return fold_convert_loc (loc, type,
10278 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10279 TREE_TYPE (arg0), arg0,
10280 cst0));
10284 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10285 one. Make sure the type is not saturating and has the signedness of
10286 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10287 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10288 if ((TREE_CODE (arg0) == MULT_EXPR
10289 || TREE_CODE (arg1) == MULT_EXPR)
10290 && !TYPE_SATURATING (type)
10291 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10292 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10293 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10295 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10296 if (tem)
10297 return tem;
10300 if (! FLOAT_TYPE_P (type))
10302 if (integer_zerop (arg1))
10303 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10305 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10306 with a constant, and the two constants have no bits in common,
10307 we should treat this as a BIT_IOR_EXPR since this may produce more
10308 simplifications. */
10309 if (TREE_CODE (arg0) == BIT_AND_EXPR
10310 && TREE_CODE (arg1) == BIT_AND_EXPR
10311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10313 && integer_zerop (const_binop (BIT_AND_EXPR,
10314 TREE_OPERAND (arg0, 1),
10315 TREE_OPERAND (arg1, 1))))
10317 code = BIT_IOR_EXPR;
10318 goto bit_ior;
10321 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10322 (plus (plus (mult) (mult)) (foo)) so that we can
10323 take advantage of the factoring cases below. */
10324 if (TYPE_OVERFLOW_WRAPS (type)
10325 && (((TREE_CODE (arg0) == PLUS_EXPR
10326 || TREE_CODE (arg0) == MINUS_EXPR)
10327 && TREE_CODE (arg1) == MULT_EXPR)
10328 || ((TREE_CODE (arg1) == PLUS_EXPR
10329 || TREE_CODE (arg1) == MINUS_EXPR)
10330 && TREE_CODE (arg0) == MULT_EXPR)))
10332 tree parg0, parg1, parg, marg;
10333 enum tree_code pcode;
10335 if (TREE_CODE (arg1) == MULT_EXPR)
10336 parg = arg0, marg = arg1;
10337 else
10338 parg = arg1, marg = arg0;
10339 pcode = TREE_CODE (parg);
10340 parg0 = TREE_OPERAND (parg, 0);
10341 parg1 = TREE_OPERAND (parg, 1);
10342 STRIP_NOPS (parg0);
10343 STRIP_NOPS (parg1);
10345 if (TREE_CODE (parg0) == MULT_EXPR
10346 && TREE_CODE (parg1) != MULT_EXPR)
10347 return fold_build2_loc (loc, pcode, type,
10348 fold_build2_loc (loc, PLUS_EXPR, type,
10349 fold_convert_loc (loc, type,
10350 parg0),
10351 fold_convert_loc (loc, type,
10352 marg)),
10353 fold_convert_loc (loc, type, parg1));
10354 if (TREE_CODE (parg0) != MULT_EXPR
10355 && TREE_CODE (parg1) == MULT_EXPR)
10356 return
10357 fold_build2_loc (loc, PLUS_EXPR, type,
10358 fold_convert_loc (loc, type, parg0),
10359 fold_build2_loc (loc, pcode, type,
10360 fold_convert_loc (loc, type, marg),
10361 fold_convert_loc (loc, type,
10362 parg1)));
10365 else
10367 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10368 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10369 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10371 /* Likewise if the operands are reversed. */
10372 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10373 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10375 /* Convert X + -C into X - C. */
10376 if (TREE_CODE (arg1) == REAL_CST
10377 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10379 tem = fold_negate_const (arg1, type);
10380 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10381 return fold_build2_loc (loc, MINUS_EXPR, type,
10382 fold_convert_loc (loc, type, arg0),
10383 fold_convert_loc (loc, type, tem));
10386 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10387 to __complex__ ( x, y ). This is not the same for SNaNs or
10388 if signed zeros are involved. */
10389 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10390 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10391 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10393 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10394 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10395 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10396 bool arg0rz = false, arg0iz = false;
10397 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10398 || (arg0i && (arg0iz = real_zerop (arg0i))))
10400 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10401 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10402 if (arg0rz && arg1i && real_zerop (arg1i))
10404 tree rp = arg1r ? arg1r
10405 : build1 (REALPART_EXPR, rtype, arg1);
10406 tree ip = arg0i ? arg0i
10407 : build1 (IMAGPART_EXPR, rtype, arg0);
10408 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10410 else if (arg0iz && arg1r && real_zerop (arg1r))
10412 tree rp = arg0r ? arg0r
10413 : build1 (REALPART_EXPR, rtype, arg0);
10414 tree ip = arg1i ? arg1i
10415 : build1 (IMAGPART_EXPR, rtype, arg1);
10416 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10421 if (flag_unsafe_math_optimizations
10422 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10423 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10424 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10425 return tem;
10427 /* Convert x+x into x*2.0. */
10428 if (operand_equal_p (arg0, arg1, 0)
10429 && SCALAR_FLOAT_TYPE_P (type))
10430 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10431 build_real (type, dconst2));
10433 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10434 We associate floats only if the user has specified
10435 -fassociative-math. */
10436 if (flag_associative_math
10437 && TREE_CODE (arg1) == PLUS_EXPR
10438 && TREE_CODE (arg0) != MULT_EXPR)
10440 tree tree10 = TREE_OPERAND (arg1, 0);
10441 tree tree11 = TREE_OPERAND (arg1, 1);
10442 if (TREE_CODE (tree11) == MULT_EXPR
10443 && TREE_CODE (tree10) == MULT_EXPR)
10445 tree tree0;
10446 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10447 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10450 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10451 We associate floats only if the user has specified
10452 -fassociative-math. */
10453 if (flag_associative_math
10454 && TREE_CODE (arg0) == PLUS_EXPR
10455 && TREE_CODE (arg1) != MULT_EXPR)
10457 tree tree00 = TREE_OPERAND (arg0, 0);
10458 tree tree01 = TREE_OPERAND (arg0, 1);
10459 if (TREE_CODE (tree01) == MULT_EXPR
10460 && TREE_CODE (tree00) == MULT_EXPR)
10462 tree tree0;
10463 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10464 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10469 bit_rotate:
10470 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10471 is a rotate of A by C1 bits. */
10472 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10473 is a rotate of A by B bits. */
10475 enum tree_code code0, code1;
10476 tree rtype;
10477 code0 = TREE_CODE (arg0);
10478 code1 = TREE_CODE (arg1);
10479 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10480 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10481 && operand_equal_p (TREE_OPERAND (arg0, 0),
10482 TREE_OPERAND (arg1, 0), 0)
10483 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10484 TYPE_UNSIGNED (rtype))
10485 /* Only create rotates in complete modes. Other cases are not
10486 expanded properly. */
10487 && (element_precision (rtype)
10488 == element_precision (TYPE_MODE (rtype))))
10490 tree tree01, tree11;
10491 enum tree_code code01, code11;
10493 tree01 = TREE_OPERAND (arg0, 1);
10494 tree11 = TREE_OPERAND (arg1, 1);
10495 STRIP_NOPS (tree01);
10496 STRIP_NOPS (tree11);
10497 code01 = TREE_CODE (tree01);
10498 code11 = TREE_CODE (tree11);
10499 if (code01 == INTEGER_CST
10500 && code11 == INTEGER_CST
10501 && TREE_INT_CST_HIGH (tree01) == 0
10502 && TREE_INT_CST_HIGH (tree11) == 0
10503 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10504 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10506 tem = build2_loc (loc, LROTATE_EXPR,
10507 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10508 TREE_OPERAND (arg0, 0),
10509 code0 == LSHIFT_EXPR ? tree01 : tree11);
10510 return fold_convert_loc (loc, type, tem);
10512 else if (code11 == MINUS_EXPR)
10514 tree tree110, tree111;
10515 tree110 = TREE_OPERAND (tree11, 0);
10516 tree111 = TREE_OPERAND (tree11, 1);
10517 STRIP_NOPS (tree110);
10518 STRIP_NOPS (tree111);
10519 if (TREE_CODE (tree110) == INTEGER_CST
10520 && 0 == compare_tree_int (tree110,
10521 element_precision
10522 (TREE_TYPE (TREE_OPERAND
10523 (arg0, 0))))
10524 && operand_equal_p (tree01, tree111, 0))
10525 return
10526 fold_convert_loc (loc, type,
10527 build2 ((code0 == LSHIFT_EXPR
10528 ? LROTATE_EXPR
10529 : RROTATE_EXPR),
10530 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10531 TREE_OPERAND (arg0, 0), tree01));
10533 else if (code01 == MINUS_EXPR)
10535 tree tree010, tree011;
10536 tree010 = TREE_OPERAND (tree01, 0);
10537 tree011 = TREE_OPERAND (tree01, 1);
10538 STRIP_NOPS (tree010);
10539 STRIP_NOPS (tree011);
10540 if (TREE_CODE (tree010) == INTEGER_CST
10541 && 0 == compare_tree_int (tree010,
10542 element_precision
10543 (TREE_TYPE (TREE_OPERAND
10544 (arg0, 0))))
10545 && operand_equal_p (tree11, tree011, 0))
10546 return fold_convert_loc
10547 (loc, type,
10548 build2 ((code0 != LSHIFT_EXPR
10549 ? LROTATE_EXPR
10550 : RROTATE_EXPR),
10551 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10552 TREE_OPERAND (arg0, 0), tree11));
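/* Editorial example (sketch): for unsigned 32-bit A the patterns
   recognized above are
     (A << 3) + (A >> 29)	   ->  A rotated left by 3
     (A << B) + (A >> (32 - B))   ->  A rotated left by B
   PLUS acts as OR here because the two shifted fields cannot
   overlap.  */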
10557 associate:
10558 /* In most languages, we can't associate operations on floats through
10559 parentheses. Rather than remember where the parentheses were, we
10560 don't associate floats at all, unless the user has specified
10561 -fassociative-math.
10562 And, we need to make sure type is not saturating. */
10564 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10565 && !TYPE_SATURATING (type))
10567 tree var0, con0, lit0, minus_lit0;
10568 tree var1, con1, lit1, minus_lit1;
10569 tree atype = type;
10570 bool ok = true;
10572 /* Split both trees into variables, constants, and literals. Then
10573 associate each group together, the constants with literals,
10574 then the result with variables. This increases the chances of
10575 literals being recombined later and of generating relocatable
10576 expressions for the sum of a constant and literal. */
10577 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10578 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10579 code == MINUS_EXPR);
10581 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10582 if (code == MINUS_EXPR)
10583 code = PLUS_EXPR;
10585 /* With undefined overflow prefer doing association in a type
10586 which wraps on overflow, if that is one of the operand types. */
10587 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10588 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10590 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10591 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10592 atype = TREE_TYPE (arg0);
10593 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10594 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10595 atype = TREE_TYPE (arg1);
10596 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10599 /* With undefined overflow we can only associate constants with one
10600 variable, and constants whose association doesn't overflow. */
10601 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10602 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10604 if (var0 && var1)
10606 tree tmp0 = var0;
10607 tree tmp1 = var1;
10609 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10610 tmp0 = TREE_OPERAND (tmp0, 0);
10611 if (CONVERT_EXPR_P (tmp0)
10612 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10613 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10614 <= TYPE_PRECISION (atype)))
10615 tmp0 = TREE_OPERAND (tmp0, 0);
10616 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10617 tmp1 = TREE_OPERAND (tmp1, 0);
10618 if (CONVERT_EXPR_P (tmp1)
10619 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10621 <= TYPE_PRECISION (atype)))
10622 tmp1 = TREE_OPERAND (tmp1, 0);
10623 /* The only case we can still associate with two variables
10624 is if they are the same, modulo negation and bit-pattern
10625 preserving conversions. */
10626 if (!operand_equal_p (tmp0, tmp1, 0))
10627 ok = false;
10631 /* Only do something if we found more than two objects. Otherwise,
10632 nothing has changed and we risk infinite recursion. */
10633 if (ok
10634 && (2 < ((var0 != 0) + (var1 != 0)
10635 + (con0 != 0) + (con1 != 0)
10636 + (lit0 != 0) + (lit1 != 0)
10637 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10639 bool any_overflows = false;
10640 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10641 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10642 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10643 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10644 var0 = associate_trees (loc, var0, var1, code, atype);
10645 con0 = associate_trees (loc, con0, con1, code, atype);
10646 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10647 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10648 code, atype);
10650 /* Preserve the MINUS_EXPR if the negative part of the literal is
10651 greater than the positive part. Otherwise, the multiplicative
10652 folding code (i.e. extract_muldiv) may be fooled when unsigned
10653 constants are subtracted, as in the following
10654 example: ((X*2 + 4) - 8U)/2. */
10655 if (minus_lit0 && lit0)
10657 if (TREE_CODE (lit0) == INTEGER_CST
10658 && TREE_CODE (minus_lit0) == INTEGER_CST
10659 && tree_int_cst_lt (lit0, minus_lit0))
10661 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10662 MINUS_EXPR, atype);
10663 lit0 = 0;
10665 else
10667 lit0 = associate_trees (loc, lit0, minus_lit0,
10668 MINUS_EXPR, atype);
10669 minus_lit0 = 0;
10673 /* Don't introduce overflows through reassociation. */
10674 if (!any_overflows
10675 && ((lit0 && TREE_OVERFLOW (lit0))
10676 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10677 return NULL_TREE;
10679 if (minus_lit0)
10681 if (con0 == 0)
10682 return
10683 fold_convert_loc (loc, type,
10684 associate_trees (loc, var0, minus_lit0,
10685 MINUS_EXPR, atype));
10686 else
10688 con0 = associate_trees (loc, con0, minus_lit0,
10689 MINUS_EXPR, atype);
10690 return
10691 fold_convert_loc (loc, type,
10692 associate_trees (loc, var0, con0,
10693 PLUS_EXPR, atype));
10697 con0 = associate_trees (loc, con0, lit0, code, atype);
10698 return
10699 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10700 code, atype));
10704 return NULL_TREE;
10706 case MINUS_EXPR:
10707 /* Pointer simplifications for subtraction, simple reassociations. */
10708 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10710 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10711 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10712 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10714 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10715 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10716 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10717 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10718 return fold_build2_loc (loc, PLUS_EXPR, type,
10719 fold_build2_loc (loc, MINUS_EXPR, type,
10720 arg00, arg10),
10721 fold_build2_loc (loc, MINUS_EXPR, type,
10722 arg01, arg11));
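/* Illustrative sketch (not part of the original source): at the C
   level, for a pointer p and integer offsets a and b,
     (p + a) - (p + b)
   folds via this rule to (p - p) + (a - b), i.e. to a - b.  */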
10724 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10725 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10727 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10728 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10729 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10730 fold_convert_loc (loc, type, arg1));
10731 if (tmp)
10732 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10735 /* A - (-B) -> A + B */
10736 if (TREE_CODE (arg1) == NEGATE_EXPR)
10737 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10738 fold_convert_loc (loc, type,
10739 TREE_OPERAND (arg1, 0)));
10740 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10741 if (TREE_CODE (arg0) == NEGATE_EXPR
10742 && negate_expr_p (arg1)
10743 && reorder_operands_p (arg0, arg1))
10744 return fold_build2_loc (loc, MINUS_EXPR, type,
10745 fold_convert_loc (loc, type,
10746 negate_expr (arg1)),
10747 fold_convert_loc (loc, type,
10748 TREE_OPERAND (arg0, 0)));
10749 /* Convert -A - 1 to ~A. */
10750 if (TREE_CODE (type) != COMPLEX_TYPE
10751 && TREE_CODE (arg0) == NEGATE_EXPR
10752 && integer_onep (arg1)
10753 && !TYPE_OVERFLOW_TRAPS (type))
10754 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10755 fold_convert_loc (loc, type,
10756 TREE_OPERAND (arg0, 0)));
10758 /* Convert -1 - A to ~A. */
10759 if (TREE_CODE (type) != COMPLEX_TYPE
10760 && integer_all_onesp (arg0))
10761 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
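/* Illustrative sketch (not part of the original source): for a
   non-trapping integral x, both
     -x - 1  ->  ~x    and    -1 - x  ->  ~x,
   by the two's-complement identity ~x == -x - 1.  */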
10764 /* X - (X / Y) * Y is X % Y. */
10765 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10766 && TREE_CODE (arg1) == MULT_EXPR
10767 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10768 && operand_equal_p (arg0,
10769 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10770 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10771 TREE_OPERAND (arg1, 1), 0))
10772 return
10773 fold_convert_loc (loc, type,
10774 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10775 arg0, TREE_OPERAND (arg1, 1)));
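/* Illustrative sketch (not part of the original source): for
   integral x and y this recognizes the truncating-division
   remainder pattern, e.g.
     x - (x / 8) * 8  ->  x % 8.  */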
10777 if (! FLOAT_TYPE_P (type))
10779 if (integer_zerop (arg0))
10780 return negate_expr (fold_convert_loc (loc, type, arg1));
10781 if (integer_zerop (arg1))
10782 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10784 /* Fold A - (A & B) into ~B & A. */
10785 if (!TREE_SIDE_EFFECTS (arg0)
10786 && TREE_CODE (arg1) == BIT_AND_EXPR)
10788 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10790 tree arg10 = fold_convert_loc (loc, type,
10791 TREE_OPERAND (arg1, 0));
10792 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10793 fold_build1_loc (loc, BIT_NOT_EXPR,
10794 type, arg10),
10795 fold_convert_loc (loc, type, arg0));
10797 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10799 tree arg11 = fold_convert_loc (loc,
10800 type, TREE_OPERAND (arg1, 1));
10801 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10802 fold_build1_loc (loc, BIT_NOT_EXPR,
10803 type, arg11),
10804 fold_convert_loc (loc, type, arg0));
10808 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10809 any power of 2 minus 1. */
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && TREE_CODE (arg1) == BIT_AND_EXPR
10812 && operand_equal_p (TREE_OPERAND (arg0, 0),
10813 TREE_OPERAND (arg1, 0), 0))
10815 tree mask0 = TREE_OPERAND (arg0, 1);
10816 tree mask1 = TREE_OPERAND (arg1, 1);
10817 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10819 if (operand_equal_p (tem, mask1, 0))
10821 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10822 TREE_OPERAND (arg0, 0), mask1);
10823 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
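/* Illustrative sketch (not part of the original source): with
   B = 7, a power of 2 minus 1,
     (a & ~7) - (a & 7)  ->  (a ^ 7) - 7.  */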
10828 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10829 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10830 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10832 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10833 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10834 (-ARG1 + ARG0) reduces to -ARG1. */
10835 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10836 return negate_expr (fold_convert_loc (loc, type, arg1));
10838 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10839 __complex__ ( x, -y ). This is not the same for SNaNs or if
10840 signed zeros are involved. */
10841 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10842 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10843 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10845 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10846 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10847 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10848 bool arg0rz = false, arg0iz = false;
10849 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10850 || (arg0i && (arg0iz = real_zerop (arg0i))))
10852 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10853 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10854 if (arg0rz && arg1i && real_zerop (arg1i))
10856 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10857 arg1r ? arg1r
10858 : build1 (REALPART_EXPR, rtype, arg1));
10859 tree ip = arg0i ? arg0i
10860 : build1 (IMAGPART_EXPR, rtype, arg0);
10861 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10863 else if (arg0iz && arg1r && real_zerop (arg1r))
10865 tree rp = arg0r ? arg0r
10866 : build1 (REALPART_EXPR, rtype, arg0);
10867 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10868 arg1i ? arg1i
10869 : build1 (IMAGPART_EXPR, rtype, arg1));
10870 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10875 /* Fold &x - &x. This can happen from &x.foo - &x.
10876 This is unsafe for certain floats even in non-IEEE formats.
10877 In IEEE, it is unsafe because it yields the wrong result for NaNs.
10878 Also note that operand_equal_p is always false if an operand
10879 is volatile. */
10881 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10882 && operand_equal_p (arg0, arg1, 0))
10883 return build_zero_cst (type);
10885 /* A - B -> A + (-B) if B is easily negatable. */
10886 if (negate_expr_p (arg1)
10887 && ((FLOAT_TYPE_P (type)
10888 /* Avoid this transformation if B is a positive REAL_CST. */
10889 && (TREE_CODE (arg1) != REAL_CST
10890 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10891 || INTEGRAL_TYPE_P (type)))
10892 return fold_build2_loc (loc, PLUS_EXPR, type,
10893 fold_convert_loc (loc, type, arg0),
10894 fold_convert_loc (loc, type,
10895 negate_expr (arg1)));
10897 /* Try folding difference of addresses. */
10899 HOST_WIDE_INT diff;
10901 if ((TREE_CODE (arg0) == ADDR_EXPR
10902 || TREE_CODE (arg1) == ADDR_EXPR)
10903 && ptr_difference_const (arg0, arg1, &diff))
10904 return build_int_cst_type (type, diff);
10907 /* Fold &a[i] - &a[j] to i-j. */
10908 if (TREE_CODE (arg0) == ADDR_EXPR
10909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10910 && TREE_CODE (arg1) == ADDR_EXPR
10911 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10913 tree tem = fold_addr_of_array_ref_difference (loc, type,
10914 TREE_OPERAND (arg0, 0),
10915 TREE_OPERAND (arg1, 0));
10916 if (tem)
10917 return tem;
10920 if (FLOAT_TYPE_P (type)
10921 && flag_unsafe_math_optimizations
10922 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10923 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10924 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10925 return tem;
10927 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10928 or with one of them being 1. Make sure the type is not saturating and
10929 has the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10930 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10931 if ((TREE_CODE (arg0) == MULT_EXPR
10932 || TREE_CODE (arg1) == MULT_EXPR)
10933 && !TYPE_SATURATING (type)
10934 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10935 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10936 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10938 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10939 if (tem)
10940 return tem;
10943 goto associate;
10945 case MULT_EXPR:
10946 /* (-A) * (-B) -> A * B */
10947 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10948 return fold_build2_loc (loc, MULT_EXPR, type,
10949 fold_convert_loc (loc, type,
10950 TREE_OPERAND (arg0, 0)),
10951 fold_convert_loc (loc, type,
10952 negate_expr (arg1)));
10953 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10954 return fold_build2_loc (loc, MULT_EXPR, type,
10955 fold_convert_loc (loc, type,
10956 negate_expr (arg0)),
10957 fold_convert_loc (loc, type,
10958 TREE_OPERAND (arg1, 0)));
10960 if (! FLOAT_TYPE_P (type))
10962 if (integer_zerop (arg1))
10963 return omit_one_operand_loc (loc, type, arg1, arg0);
10964 if (integer_onep (arg1))
10965 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10966 /* Transform x * -1 into -x. Make sure to do the negation
10967 on the original operand with conversions not stripped
10968 because we can only strip non-sign-changing conversions. */
10969 if (integer_minus_onep (arg1))
10970 return fold_convert_loc (loc, type, negate_expr (op0));
10971 /* Transform x * -C into -x * C if x is easily negatable. */
10972 if (TREE_CODE (arg1) == INTEGER_CST
10973 && tree_int_cst_sgn (arg1) == -1
10974 && negate_expr_p (arg0)
10975 && (tem = negate_expr (arg1)) != arg1
10976 && !TREE_OVERFLOW (tem))
10977 return fold_build2_loc (loc, MULT_EXPR, type,
10978 fold_convert_loc (loc, type,
10979 negate_expr (arg0)),
10980 tem);
10982 /* (a * (1 << b)) is (a << b) */
10983 if (TREE_CODE (arg1) == LSHIFT_EXPR
10984 && integer_onep (TREE_OPERAND (arg1, 0)))
10985 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10986 TREE_OPERAND (arg1, 1));
10987 if (TREE_CODE (arg0) == LSHIFT_EXPR
10988 && integer_onep (TREE_OPERAND (arg0, 0)))
10989 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10990 TREE_OPERAND (arg0, 1));
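/* Illustrative sketch (not part of the original source):
     a * (1 << b)  ->  a << b    and    (1 << b) * a  ->  a << b,
   turning a multiplication by a power of two into a shift.  */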
10992 /* (A + A) * C -> A * 2 * C */
10993 if (TREE_CODE (arg0) == PLUS_EXPR
10994 && TREE_CODE (arg1) == INTEGER_CST
10995 && operand_equal_p (TREE_OPERAND (arg0, 0),
10996 TREE_OPERAND (arg0, 1), 0))
10997 return fold_build2_loc (loc, MULT_EXPR, type,
10998 omit_one_operand_loc (loc, type,
10999 TREE_OPERAND (arg0, 0),
11000 TREE_OPERAND (arg0, 1)),
11001 fold_build2_loc (loc, MULT_EXPR, type,
11002 build_int_cst (type, 2), arg1));
11004 strict_overflow_p = false;
11005 if (TREE_CODE (arg1) == INTEGER_CST
11006 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11007 &strict_overflow_p)))
11009 if (strict_overflow_p)
11010 fold_overflow_warning (("assuming signed overflow does not "
11011 "occur when simplifying "
11012 "multiplication"),
11013 WARN_STRICT_OVERFLOW_MISC);
11014 return fold_convert_loc (loc, type, tem);
11017 /* Optimize z * conj(z) for integer complex numbers. */
11018 if (TREE_CODE (arg0) == CONJ_EXPR
11019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11020 return fold_mult_zconjz (loc, type, arg1);
11021 if (TREE_CODE (arg1) == CONJ_EXPR
11022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11023 return fold_mult_zconjz (loc, type, arg0);
11025 else
11027 /* Maybe fold x * 0 to 0. The expressions aren't the same
11028 when x is NaN, since x * 0 is also NaN. Nor are they the
11029 same in modes with signed zeros, since multiplying a
11030 negative value by 0 gives -0, not +0. */
11031 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11032 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11033 && real_zerop (arg1))
11034 return omit_one_operand_loc (loc, type, arg1, arg0);
11035 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11036 Likewise for complex arithmetic with signed zeros. */
11037 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11038 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11039 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11040 && real_onep (arg1))
11041 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11043 /* Transform x * -1.0 into -x. */
11044 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11045 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11046 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11047 && real_minus_onep (arg1))
11048 return fold_convert_loc (loc, type, negate_expr (arg0));
11050 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11051 the result for floating-point types due to rounding, so it is applied
11052 only if -fassociative-math was specified. */
11053 if (flag_associative_math
11054 && TREE_CODE (arg0) == RDIV_EXPR
11055 && TREE_CODE (arg1) == REAL_CST
11056 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11058 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11059 arg1);
11060 if (tem)
11061 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11062 TREE_OPERAND (arg0, 1));
11065 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11066 if (operand_equal_p (arg0, arg1, 0))
11068 tree tem = fold_strip_sign_ops (arg0);
11069 if (tem != NULL_TREE)
11071 tem = fold_convert_loc (loc, type, tem);
11072 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11076 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11077 This is not the same for NaNs or if signed zeros are
11078 involved. */
11079 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11080 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11081 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11082 && TREE_CODE (arg1) == COMPLEX_CST
11083 && real_zerop (TREE_REALPART (arg1)))
11085 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11086 if (real_onep (TREE_IMAGPART (arg1)))
11087 return
11088 fold_build2_loc (loc, COMPLEX_EXPR, type,
11089 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11090 rtype, arg0)),
11091 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11092 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11093 return
11094 fold_build2_loc (loc, COMPLEX_EXPR, type,
11095 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11096 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11097 rtype, arg0)));
11100 /* Optimize z * conj(z) for floating point complex numbers.
11101 Guarded by flag_unsafe_math_optimizations as non-finite
11102 imaginary components don't produce scalar results. */
11103 if (flag_unsafe_math_optimizations
11104 && TREE_CODE (arg0) == CONJ_EXPR
11105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11106 return fold_mult_zconjz (loc, type, arg1);
11107 if (flag_unsafe_math_optimizations
11108 && TREE_CODE (arg1) == CONJ_EXPR
11109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11110 return fold_mult_zconjz (loc, type, arg0);
11112 if (flag_unsafe_math_optimizations)
11114 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11115 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11117 /* Optimizations of root(...)*root(...). */
11118 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11120 tree rootfn, arg;
11121 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11122 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11124 /* Optimize sqrt(x)*sqrt(x) as x. */
11125 if (BUILTIN_SQRT_P (fcode0)
11126 && operand_equal_p (arg00, arg10, 0)
11127 && ! HONOR_SNANS (TYPE_MODE (type)))
11128 return arg00;
11130 /* Optimize root(x)*root(y) as root(x*y). */
11131 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11132 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11133 return build_call_expr_loc (loc, rootfn, 1, arg);
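/* Illustrative sketch (not part of the original source), valid
   under -funsafe-math-optimizations:
     sqrt (x) * sqrt (x)  ->  x
     cbrt (x) * cbrt (y)  ->  cbrt (x * y).  */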
11136 /* Optimize expN(x)*expN(y) as expN(x+y). */
11137 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11139 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11140 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11141 CALL_EXPR_ARG (arg0, 0),
11142 CALL_EXPR_ARG (arg1, 0));
11143 return build_call_expr_loc (loc, expfn, 1, arg);
11146 /* Optimizations of pow(...)*pow(...). */
11147 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11148 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11149 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11151 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11152 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11153 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11154 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11156 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11157 if (operand_equal_p (arg01, arg11, 0))
11159 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11160 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11161 arg00, arg10);
11162 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11165 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11166 if (operand_equal_p (arg00, arg10, 0))
11168 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11169 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11170 arg01, arg11);
11171 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
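/* Illustrative sketch (not part of the original source):
     pow (x, y) * pow (z, y)  ->  pow (x * z, y)
     pow (x, y) * pow (x, z)  ->  pow (x, y + z).  */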
11175 /* Optimize tan(x)*cos(x) as sin(x). */
11176 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11177 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11178 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11179 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11180 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11181 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11182 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11183 CALL_EXPR_ARG (arg1, 0), 0))
11185 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11187 if (sinfn != NULL_TREE)
11188 return build_call_expr_loc (loc, sinfn, 1,
11189 CALL_EXPR_ARG (arg0, 0));
11192 /* Optimize x*pow(x,c) as pow(x,c+1). */
11193 if (fcode1 == BUILT_IN_POW
11194 || fcode1 == BUILT_IN_POWF
11195 || fcode1 == BUILT_IN_POWL)
11197 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11198 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11199 if (TREE_CODE (arg11) == REAL_CST
11200 && !TREE_OVERFLOW (arg11)
11201 && operand_equal_p (arg0, arg10, 0))
11203 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11204 REAL_VALUE_TYPE c;
11205 tree arg;
11207 c = TREE_REAL_CST (arg11);
11208 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11209 arg = build_real (type, c);
11210 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11214 /* Optimize pow(x,c)*x as pow(x,c+1). */
11215 if (fcode0 == BUILT_IN_POW
11216 || fcode0 == BUILT_IN_POWF
11217 || fcode0 == BUILT_IN_POWL)
11219 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11220 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11221 if (TREE_CODE (arg01) == REAL_CST
11222 && !TREE_OVERFLOW (arg01)
11223 && operand_equal_p (arg1, arg00, 0))
11225 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11226 REAL_VALUE_TYPE c;
11227 tree arg;
11229 c = TREE_REAL_CST (arg01);
11230 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11231 arg = build_real (type, c);
11232 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11236 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11237 if (!in_gimple_form
11238 && optimize
11239 && operand_equal_p (arg0, arg1, 0))
11241 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11243 if (powfn)
11245 tree arg = build_real (type, dconst2);
11246 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11251 goto associate;
11253 case BIT_IOR_EXPR:
11254 bit_ior:
11255 if (integer_all_onesp (arg1))
11256 return omit_one_operand_loc (loc, type, arg1, arg0);
11257 if (integer_zerop (arg1))
11258 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11259 if (operand_equal_p (arg0, arg1, 0))
11260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11262 /* ~X | X is -1. */
11263 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11264 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11266 t1 = build_zero_cst (type);
11267 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11268 return omit_one_operand_loc (loc, type, t1, arg1);
11271 /* X | ~X is -1. */
11272 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11273 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11275 t1 = build_zero_cst (type);
11276 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11277 return omit_one_operand_loc (loc, type, t1, arg0);
11280 /* Canonicalize (X & C1) | C2. */
11281 if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 && TREE_CODE (arg1) == INTEGER_CST
11283 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11285 double_int c1, c2, c3, msk;
11286 int width = TYPE_PRECISION (type), w;
11287 bool try_simplify = true;
11289 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11290 c2 = tree_to_double_int (arg1);
11292 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11293 if ((c1 & c2) == c1)
11294 return omit_one_operand_loc (loc, type, arg1,
11295 TREE_OPERAND (arg0, 0));
11297 msk = double_int::mask (width);
11299 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11300 if (msk.and_not (c1 | c2).is_zero ())
11301 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11302 TREE_OPERAND (arg0, 0), arg1);
11304 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11305 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11306 mode which allows further optimizations. */
11307 c1 &= msk;
11308 c2 &= msk;
11309 c3 = c1.and_not (c2);
11310 for (w = BITS_PER_UNIT;
11311 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11312 w <<= 1)
11314 unsigned HOST_WIDE_INT mask
11315 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11316 if (((c1.low | c2.low) & mask) == mask
11317 && (c1.low & ~mask) == 0 && c1.high == 0)
11319 c3 = double_int::from_uhwi (mask);
11320 break;
11324 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11325 with the corresponding optimization in the BIT_AND_EXPR case
11326 and could end up in infinite recursion. */
11327 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11328 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11329 == INTEGER_CST)
11331 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11332 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11334 try_simplify = (masked != c1);
11337 if (try_simplify && c3 != c1)
11338 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11339 fold_build2_loc (loc, BIT_AND_EXPR, type,
11340 TREE_OPERAND (arg0, 0),
11341 double_int_to_tree (type,
11342 c3)),
11343 arg1);
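/* Illustrative worked example (not part of the original source):
   for (x & 0x3) | 0x1, C1 = 3 and C2 = 1, so C1 & ~C2 = 2 and the
   expression becomes (x & 0x2) | 0x1; for (x & 0x1) | 0x3,
   (C1 & C2) == C1, so the whole expression folds to 0x3.  */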
11346 /* (X & Y) | Y is (X, Y). */
11347 if (TREE_CODE (arg0) == BIT_AND_EXPR
11348 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11349 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11350 /* (X & Y) | X is (Y, X). */
11351 if (TREE_CODE (arg0) == BIT_AND_EXPR
11352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11353 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11354 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11355 /* X | (X & Y) is (Y, X). */
11356 if (TREE_CODE (arg1) == BIT_AND_EXPR
11357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11358 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11359 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11360 /* X | (Y & X) is (Y, X). */
11361 if (TREE_CODE (arg1) == BIT_AND_EXPR
11362 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11363 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11364 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
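/* Illustrative note (not part of the original source): the "(X, Y)"
   notation above denotes a COMPOUND_EXPR that keeps X only for its
   side effects and yields Y, e.g.
     (x++ & y) | y  ->  (x++, y).  */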
11366 /* (X & ~Y) | (~X & Y) is X ^ Y */
11367 if (TREE_CODE (arg0) == BIT_AND_EXPR
11368 && TREE_CODE (arg1) == BIT_AND_EXPR)
11370 tree a0, a1, l0, l1, n0, n1;
11372 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11373 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11375 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11376 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11378 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11379 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11381 if ((operand_equal_p (n0, a0, 0)
11382 && operand_equal_p (n1, a1, 0))
11383 || (operand_equal_p (n0, a1, 0)
11384 && operand_equal_p (n1, a0, 0)))
11385 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11388 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11389 if (t1 != NULL_TREE)
11390 return t1;
11392 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11394 This results in more efficient code for machines without a NAND
11395 instruction. Combine will canonicalize to the first form
11396 which will allow use of NAND instructions provided by the
11397 backend if they exist. */
11398 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11399 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11401 return
11402 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11403 build2 (BIT_AND_EXPR, type,
11404 fold_convert_loc (loc, type,
11405 TREE_OPERAND (arg0, 0)),
11406 fold_convert_loc (loc, type,
11407 TREE_OPERAND (arg1, 0))));
11410 /* See if this can be simplified into a rotate first. If that
11411 is unsuccessful continue in the association code. */
11412 goto bit_rotate;
11414 case BIT_XOR_EXPR:
11415 if (integer_zerop (arg1))
11416 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11417 if (integer_all_onesp (arg1))
11418 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11419 if (operand_equal_p (arg0, arg1, 0))
11420 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11422 /* ~X ^ X is -1. */
11423 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11426 t1 = build_zero_cst (type);
11427 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11428 return omit_one_operand_loc (loc, type, t1, arg1);
11431 /* X ^ ~X is -1. */
11432 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11435 t1 = build_zero_cst (type);
11436 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11437 return omit_one_operand_loc (loc, type, t1, arg0);
11440 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11441 with a constant, and the two constants have no bits in common,
11442 we should treat this as a BIT_IOR_EXPR since this may produce more
11443 simplifications. */
11444 if (TREE_CODE (arg0) == BIT_AND_EXPR
11445 && TREE_CODE (arg1) == BIT_AND_EXPR
11446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11447 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11448 && integer_zerop (const_binop (BIT_AND_EXPR,
11449 TREE_OPERAND (arg0, 1),
11450 TREE_OPERAND (arg1, 1))))
11452 code = BIT_IOR_EXPR;
11453 goto bit_ior;
11456 /* (X | Y) ^ X -> Y & ~X */
11457 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11460 tree t2 = TREE_OPERAND (arg0, 1);
11461 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11462 arg1);
11463 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11464 fold_convert_loc (loc, type, t2),
11465 fold_convert_loc (loc, type, t1));
11466 return t1;
11469 /* (Y | X) ^ X -> Y & ~X */
11470 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11471 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11473 tree t2 = TREE_OPERAND (arg0, 0);
11474 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11475 arg1);
11476 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11477 fold_convert_loc (loc, type, t2),
11478 fold_convert_loc (loc, type, t1));
11479 return t1;
11482 /* X ^ (X | Y) -> Y & ~X */
11483 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11484 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11486 tree t2 = TREE_OPERAND (arg1, 1);
11487 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11488 arg0);
11489 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11490 fold_convert_loc (loc, type, t2),
11491 fold_convert_loc (loc, type, t1));
11492 return t1;
11495 /* X ^ (Y | X) -> Y & ~X */
11496 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11497 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11499 tree t2 = TREE_OPERAND (arg1, 0);
11500 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11501 arg0);
11502 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11503 fold_convert_loc (loc, type, t2),
11504 fold_convert_loc (loc, type, t1));
11505 return t1;
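/* Illustrative sketch (not part of the original source): all four
   shapes above reduce the same way, e.g.
     (x | y) ^ x  ->  y & ~x,
   since XOR cancels the bits of x and only the bits y contributes
   beyond x survive.  */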
11508 /* Convert ~X ^ ~Y to X ^ Y. */
11509 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11510 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11511 return fold_build2_loc (loc, code, type,
11512 fold_convert_loc (loc, type,
11513 TREE_OPERAND (arg0, 0)),
11514 fold_convert_loc (loc, type,
11515 TREE_OPERAND (arg1, 0)));
11517 /* Convert ~X ^ C to X ^ ~C. */
11518 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11519 && TREE_CODE (arg1) == INTEGER_CST)
11520 return fold_build2_loc (loc, code, type,
11521 fold_convert_loc (loc, type,
11522 TREE_OPERAND (arg0, 0)),
11523 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11525 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11526 if (TREE_CODE (arg0) == BIT_AND_EXPR
11527 && integer_onep (TREE_OPERAND (arg0, 1))
11528 && integer_onep (arg1))
11529 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11530 build_zero_cst (TREE_TYPE (arg0)));
11532 /* Fold (X & Y) ^ Y as ~X & Y. */
11533 if (TREE_CODE (arg0) == BIT_AND_EXPR
11534 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11536 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11539 fold_convert_loc (loc, type, arg1));
11541 /* Fold (X & Y) ^ X as ~Y & X. */
11542 if (TREE_CODE (arg0) == BIT_AND_EXPR
11543 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11544 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11546 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11547 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11548 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11549 fold_convert_loc (loc, type, arg1));
11551 /* Fold X ^ (X & Y) as X & ~Y. */
11552 if (TREE_CODE (arg1) == BIT_AND_EXPR
11553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11555 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11556 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11557 fold_convert_loc (loc, type, arg0),
11558 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11560 /* Fold X ^ (Y & X) as ~Y & X. */
11561 if (TREE_CODE (arg1) == BIT_AND_EXPR
11562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11563 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11565 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11566 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11567 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11568 fold_convert_loc (loc, type, arg0));
11571 /* See if this can be simplified into a rotate first. If that
11572 is unsuccessful continue in the association code. */
11573 goto bit_rotate;
11575 case BIT_AND_EXPR:
11576 if (integer_all_onesp (arg1))
11577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11578 if (integer_zerop (arg1))
11579 return omit_one_operand_loc (loc, type, arg1, arg0);
11580 if (operand_equal_p (arg0, arg1, 0))
11581 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11583 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11584 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11585 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11586 || (TREE_CODE (arg0) == EQ_EXPR
11587 && integer_zerop (TREE_OPERAND (arg0, 1))))
11588 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11589 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11591 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11592 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11593 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11594 || (TREE_CODE (arg1) == EQ_EXPR
11595 && integer_zerop (TREE_OPERAND (arg1, 1))))
11596 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11597 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11599 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11600 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11601 && TREE_CODE (arg1) == INTEGER_CST
11602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11604 tree tmp1 = fold_convert_loc (loc, type, arg1);
11605 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11606 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11607 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11608 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11609 return
11610 fold_convert_loc (loc, type,
11611 fold_build2_loc (loc, BIT_IOR_EXPR,
11612 type, tmp2, tmp3));
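/* Illustrative worked example (not part of the original source):
     (x | 0x0C) & 0x0A  ->  (x & 0x0A) | (0x0C & 0x0A)
                        ->  (x & 0x0A) | 0x08.  */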
11615 /* (X | Y) & Y is (X, Y). */
11616 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11617 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11618 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11619 /* (X | Y) & X is (Y, X). */
11620 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11622 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11623 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11624 /* X & (X | Y) is (Y, X). */
11625 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11626 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11627 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11628 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11629 /* X & (Y | X) is (Y, X). */
11630 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11632 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11633 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11635 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11636 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11637 && integer_onep (TREE_OPERAND (arg0, 1))
11638 && integer_onep (arg1))
11640 tree tem2;
11641 tem = TREE_OPERAND (arg0, 0);
11642 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11643 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11644 tem, tem2);
11645 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11646 build_zero_cst (TREE_TYPE (tem)));
11648 /* Fold ~X & 1 as (X & 1) == 0. */
11649 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11650 && integer_onep (arg1))
11652 tree tem2;
11653 tem = TREE_OPERAND (arg0, 0);
11654 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11655 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11656 tem, tem2);
11657 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11658 build_zero_cst (TREE_TYPE (tem)));
11660 /* Fold !X & 1 as X == 0. */
11661 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11662 && integer_onep (arg1))
11664 tem = TREE_OPERAND (arg0, 0);
11665 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11666 build_zero_cst (TREE_TYPE (tem)));
11669 /* Fold (X ^ Y) & Y as ~X & Y. */
11670 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11671 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11673 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11674 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11675 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11676 fold_convert_loc (loc, type, arg1));
11678 /* Fold (X ^ Y) & X as ~Y & X. */
11679 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11680 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11681 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11683 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11684 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11685 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11686 fold_convert_loc (loc, type, arg1));
11688 /* Fold X & (X ^ Y) as X & ~Y. */
11689 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11692 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11693 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11694 fold_convert_loc (loc, type, arg0),
11695 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11697 /* Fold X & (Y ^ X) as ~Y & X. */
11698 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11700 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11702 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11703 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11704 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11705 fold_convert_loc (loc, type, arg0));
11708 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11709 multiple of 1 << CST. */
11710 if (TREE_CODE (arg1) == INTEGER_CST)
11712 double_int cst1 = tree_to_double_int (arg1);
11713 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11714 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11715 if ((cst1 & ncst1) == ncst1
11716 && multiple_of_p (type, arg0,
11717 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11718 return fold_convert_loc (loc, type, arg0);
11721 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11722 bits from CST2. */
11723 if (TREE_CODE (arg1) == INTEGER_CST
11724 && TREE_CODE (arg0) == MULT_EXPR
11725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11727 double_int masked
11728 = mask_with_tz (type, tree_to_double_int (arg1),
11729 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11731 if (masked.is_zero ())
11732 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11733 arg0, arg1);
11734 else if (masked != tree_to_double_int (arg1))
11735 return fold_build2_loc (loc, code, type, op0,
11736 double_int_to_tree (type, masked));
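/* Illustrative worked example (not part of the original source):
   CST1 = 4 has two trailing zero bits, so those bits of the
   product are known to be zero:
     (x * 4) & 3  ->  0
     (x * 4) & 7  ->  (x * 4) & 4.  */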
11739 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11740 ((A & N) + B) & M -> (A + B) & M
11741 Similarly if (N & M) == 0,
11742 ((A | N) + B) & M -> (A + B) & M
11743 and for - instead of + (or unary - instead of +)
11744 and/or ^ instead of |.
11745 If B is constant and (B & M) == 0, fold into A & M. */
11746 if (host_integerp (arg1, 1))
11748 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11749 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11750 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11751 && (TREE_CODE (arg0) == PLUS_EXPR
11752 || TREE_CODE (arg0) == MINUS_EXPR
11753 || TREE_CODE (arg0) == NEGATE_EXPR)
11754 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11755 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11757 tree pmop[2];
11758 int which = 0;
11759 unsigned HOST_WIDE_INT cst0;
11761 /* Now we know that arg0 is (C + D) or (C - D) or
11762 -C and arg1 (M) is (1LL << cst) - 1.
11763 Store C into PMOP[0] and D into PMOP[1]. */
11764 pmop[0] = TREE_OPERAND (arg0, 0);
11765 pmop[1] = NULL;
11766 if (TREE_CODE (arg0) != NEGATE_EXPR)
11768 pmop[1] = TREE_OPERAND (arg0, 1);
11769 which = 1;
11772 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11773 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11774 & cst1) != cst1)
11775 which = -1;
11777 for (; which >= 0; which--)
11778 switch (TREE_CODE (pmop[which]))
11780 case BIT_AND_EXPR:
11781 case BIT_IOR_EXPR:
11782 case BIT_XOR_EXPR:
11783 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11784 != INTEGER_CST)
11785 break;
11786 /* tree_low_cst not used, because we don't care about
11787 the upper bits. */
11788 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11789 cst0 &= cst1;
11790 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11792 if (cst0 != cst1)
11793 break;
11795 else if (cst0 != 0)
11796 break;
11797 /* If C or D is of the form (A & N) where
11798 (N & M) == M, or of the form (A | N) or
11799 (A ^ N) where (N & M) == 0, replace it with A. */
11800 pmop[which] = TREE_OPERAND (pmop[which], 0);
11801 break;
11802 case INTEGER_CST:
11803 /* If C or D is a N where (N & M) == 0, it can be
11804 omitted (assumed 0). */
11805 if ((TREE_CODE (arg0) == PLUS_EXPR
11806 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11807 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11808 pmop[which] = NULL;
11809 break;
11810 default:
11811 break;
11814 /* Only build anything new if we optimized one or both arguments
11815 above. */
11816 if (pmop[0] != TREE_OPERAND (arg0, 0)
11817 || (TREE_CODE (arg0) != NEGATE_EXPR
11818 && pmop[1] != TREE_OPERAND (arg0, 1)))
11820 tree utype = TREE_TYPE (arg0);
11821 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11823 /* Perform the operations in a type that has defined
11824 overflow behavior. */
11825 utype = unsigned_type_for (TREE_TYPE (arg0));
11826 if (pmop[0] != NULL)
11827 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11828 if (pmop[1] != NULL)
11829 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11832 if (TREE_CODE (arg0) == NEGATE_EXPR)
11833 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11834 else if (TREE_CODE (arg0) == PLUS_EXPR)
11836 if (pmop[0] != NULL && pmop[1] != NULL)
11837 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11838 pmop[0], pmop[1]);
11839 else if (pmop[0] != NULL)
11840 tem = pmop[0];
11841 else if (pmop[1] != NULL)
11842 tem = pmop[1];
11843 else
11844 return build_int_cst (type, 0);
11846 else if (pmop[0] == NULL)
11847 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11848 else
11849 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11850 pmop[0], pmop[1]);
11851 /* TEM is now the new binary +, - or unary - replacement. */
11852 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11853 fold_convert_loc (loc, utype, arg1));
11854 return fold_convert_loc (loc, type, tem);
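/* Illustrative sketch (not part of the original source): with
   M = 7, i.e. (1 << 3) - 1,
     ((a & 7) + b) & 7   ->  (a + b) & 7
     ((a | 8) - b) & 7   ->  (a - b) & 7
     ((a ^ 16) + 5) & 7  ->  (a + 5) & 7,
   because only the bits selected by M survive the final mask.  */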
11859 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11860 if (t1 != NULL_TREE)
11861 return t1;
11862 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11863 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11864 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11866 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11868 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11869 && (~TREE_INT_CST_LOW (arg1)
11870 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11871 return
11872 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11875 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11877 This results in more efficient code for machines without a NOR
11878 instruction. Combine will canonicalize to the first form
11879 which will allow use of NOR instructions provided by the
11880 backend if they exist. */
11881 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11882 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11884 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11885 build2 (BIT_IOR_EXPR, type,
11886 fold_convert_loc (loc, type,
11887 TREE_OPERAND (arg0, 0)),
11888 fold_convert_loc (loc, type,
11889 TREE_OPERAND (arg1, 0))));
11892 /* If arg0 is derived from the address of an object or function, we may
11893 be able to fold this expression using the object or function's
11894 alignment. */
11895 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11897 unsigned HOST_WIDE_INT modulus, residue;
11898 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11900 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11901 integer_onep (arg1));
11903 /* This works because modulus is a power of 2. If this weren't the
11904 case, we'd have to replace it by its greatest power-of-2
11905 divisor: modulus & -modulus. */
11906 if (low < modulus)
11907 return build_int_cst (type, residue & low);
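/* Illustrative sketch (not part of the original source): if obj is
   known to be 8-byte aligned, an alignment test such as
     (uintptr_t) &obj & 7
   folds to 0, since the address's residue modulo the alignment
   is known.  */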
11910 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11911 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11912 if the new mask might be further optimized. */
11913 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11914 || TREE_CODE (arg0) == RSHIFT_EXPR)
11915 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11916 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11917 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11918 < TYPE_PRECISION (TREE_TYPE (arg0))
11919 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11920 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11922 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11923 unsigned HOST_WIDE_INT mask
11924 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11925 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11926 tree shift_type = TREE_TYPE (arg0);
11928 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11929 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11930 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11931 && TYPE_PRECISION (TREE_TYPE (arg0))
11932 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11934 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11935 tree arg00 = TREE_OPERAND (arg0, 0);
11936 /* See if more bits can be proven as zero because of
11937 zero extension. */
11938 if (TREE_CODE (arg00) == NOP_EXPR
11939 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11941 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11942 if (TYPE_PRECISION (inner_type)
11943 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11944 && TYPE_PRECISION (inner_type) < prec)
11946 prec = TYPE_PRECISION (inner_type);
11947 /* See if we can shorten the right shift. */
11948 if (shiftc < prec)
11949 shift_type = inner_type;
11952 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11953 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11954 zerobits <<= prec - shiftc;
11955 /* For an arithmetic shift, if the sign bit could be set, zerobits
11956 can actually contain sign bits, so no transformation is
11957 possible, unless MASK masks them all away. In that
11958 case the shift needs to be converted into a logical shift. */
11959 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11960 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11962 if ((mask & zerobits) == 0)
11963 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11964 else
11965 zerobits = 0;
11969 /* ((X << 16) & 0xff00) is (X, 0). */
11970 if ((mask & zerobits) == mask)
11971 return omit_one_operand_loc (loc, type,
11972 build_int_cst (type, 0), arg0);
11974 newmask = mask | zerobits;
11975 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11977 /* Only do the transformation if NEWMASK is some integer
11978 mode's mask. */
11979 for (prec = BITS_PER_UNIT;
11980 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11981 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11982 break;
11983 if (prec < HOST_BITS_PER_WIDE_INT
11984 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11986 tree newmaskt;
11988 if (shift_type != TREE_TYPE (arg0))
11990 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11991 fold_convert_loc (loc, shift_type,
11992 TREE_OPERAND (arg0, 0)),
11993 TREE_OPERAND (arg0, 1));
11994 tem = fold_convert_loc (loc, type, tem);
11996 else
11997 tem = op0;
11998 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11999 if (!tree_int_cst_equal (newmaskt, arg1))
12000 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
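/* Illustrative worked example (not part of the original source):
   for a 32-bit unsigned x,
     (x << 4) & 0xF3  ->  (x << 4) & 0xFF,
   because the low four bits are zero after the shift and 0xFF is
   an integer mode's mask; ((x << 16) & 0xff00) folds to 0 outright.  */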
12005 goto associate;
12007 case RDIV_EXPR:
12008 /* Don't touch a floating-point divide by zero unless the mode
12009 of the constant can represent infinity. */
12010 if (TREE_CODE (arg1) == REAL_CST
12011 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12012 && real_zerop (arg1))
12013 return NULL_TREE;
12015 /* Optimize A / A to 1.0 if we don't care about
12016 NaNs or Infinities. Skip the transformation
12017 for non-real operands. */
12018 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12019 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12020 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12021 && operand_equal_p (arg0, arg1, 0))
12023 tree r = build_real (TREE_TYPE (arg0), dconst1);
12025 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12028 /* The complex version of the above A / A optimization. */
12029 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12030 && operand_equal_p (arg0, arg1, 0))
12032 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12033 if (! HONOR_NANS (TYPE_MODE (elem_type))
12034 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12036 tree r = build_real (elem_type, dconst1);
12037 /* omit_two_operands will call fold_convert for us. */
12038 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12042 /* (-A) / (-B) -> A / B */
12043 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12044 return fold_build2_loc (loc, RDIV_EXPR, type,
12045 TREE_OPERAND (arg0, 0),
12046 negate_expr (arg1));
12047 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12048 return fold_build2_loc (loc, RDIV_EXPR, type,
12049 negate_expr (arg0),
12050 TREE_OPERAND (arg1, 0));
12052 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
12053 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12054 && real_onep (arg1))
12055 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12057 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
12058 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12059 && real_minus_onep (arg1))
12060 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12061 negate_expr (arg0)));
12063 /* If ARG1 is a constant, we can convert this to a multiply by the
12064 reciprocal. This does not have the same rounding properties,
12065 so only do this if -freciprocal-math. We can actually
12066 always safely do it if ARG1 is a power of two, but it's hard to
12067 tell if it is or not in a portable manner. */
12068 if (optimize
12069 && (TREE_CODE (arg1) == REAL_CST
12070 || (TREE_CODE (arg1) == COMPLEX_CST
12071 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12072 || (TREE_CODE (arg1) == VECTOR_CST
12073 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12075 if (flag_reciprocal_math
12076 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12077 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12078 /* Find the reciprocal if optimizing and the result is exact.
12079 TODO: Complex reciprocal not implemented. */
12080 if (TREE_CODE (arg1) != COMPLEX_CST)
12082 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12084 if (inverse)
12085 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
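/* Illustrative sketch (not part of the original source): with
   -freciprocal-math, x / 5.0 becomes x * (1.0 / 5.0); x / 4.0
   becomes x * 0.25 even without that flag when optimizing, since
   the reciprocal of a power of two is exact.  */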
12088 /* Convert A/B/C to A/(B*C). */
12089 if (flag_reciprocal_math
12090 && TREE_CODE (arg0) == RDIV_EXPR)
12091 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12092 fold_build2_loc (loc, MULT_EXPR, type,
12093 TREE_OPERAND (arg0, 1), arg1));
12095 /* Convert A/(B/C) to (A/B)*C. */
12096 if (flag_reciprocal_math
12097 && TREE_CODE (arg1) == RDIV_EXPR)
12098 return fold_build2_loc (loc, MULT_EXPR, type,
12099 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12100 TREE_OPERAND (arg1, 0)),
12101 TREE_OPERAND (arg1, 1));
12103 /* Convert C1/(X*C2) into (C1/C2)/X. */
12104 if (flag_reciprocal_math
12105 && TREE_CODE (arg1) == MULT_EXPR
12106 && TREE_CODE (arg0) == REAL_CST
12107 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12109 tree tem = const_binop (RDIV_EXPR, arg0,
12110 TREE_OPERAND (arg1, 1));
12111 if (tem)
12112 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12113 TREE_OPERAND (arg1, 0));
12116 if (flag_unsafe_math_optimizations)
12118 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12119 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12121 /* Optimize sin(x)/cos(x) as tan(x). */
12122 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12123 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12124 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12125 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12126 CALL_EXPR_ARG (arg1, 0), 0))
12128 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12130 if (tanfn != NULL_TREE)
12131 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12134 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12135 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12136 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12137 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12138 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12139 CALL_EXPR_ARG (arg1, 0), 0))
12141 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12143 if (tanfn != NULL_TREE)
12145 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12146 CALL_EXPR_ARG (arg0, 0));
12147 return fold_build2_loc (loc, RDIV_EXPR, type,
12148 build_real (type, dconst1), tmp);
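/* Illustrative sketch (not part of the original source), valid
   under -funsafe-math-optimizations:
     sin (x) / cos (x)  ->  tan (x)
     cos (x) / sin (x)  ->  1.0 / tan (x).  */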
12152 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12153 NaNs or Infinities. */
12154 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12155 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12156 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12158 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12159 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12161 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12162 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12163 && operand_equal_p (arg00, arg01, 0))
12165 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12167 if (cosfn != NULL_TREE)
12168 return build_call_expr_loc (loc, cosfn, 1, arg00);
12172 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12173 NaNs or Infinities. */
12174 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12175 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12176 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12178 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12179 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12181 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12182 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12183 && operand_equal_p (arg00, arg01, 0))
12185 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12187 if (cosfn != NULL_TREE)
12189 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12190 return fold_build2_loc (loc, RDIV_EXPR, type,
12191 build_real (type, dconst1),
12192 tmp);
12197 /* Optimize pow(x,c)/x as pow(x,c-1). */
12198 if (fcode0 == BUILT_IN_POW
12199 || fcode0 == BUILT_IN_POWF
12200 || fcode0 == BUILT_IN_POWL)
12202 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12203 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12204 if (TREE_CODE (arg01) == REAL_CST
12205 && !TREE_OVERFLOW (arg01)
12206 && operand_equal_p (arg1, arg00, 0))
12208 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12209 REAL_VALUE_TYPE c;
12210 tree arg;
12212 c = TREE_REAL_CST (arg01);
12213 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12214 arg = build_real (type, c);
12215 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12219 /* Optimize a/root(b/c) into a*root(c/b). */
12220 if (BUILTIN_ROOT_P (fcode1))
12222 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12224 if (TREE_CODE (rootarg) == RDIV_EXPR)
12226 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12227 tree b = TREE_OPERAND (rootarg, 0);
12228 tree c = TREE_OPERAND (rootarg, 1);
12230 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12232 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12233 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12237 /* Optimize x/expN(y) into x*expN(-y). */
12238 if (BUILTIN_EXPONENT_P (fcode1))
12240 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12241 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12242 arg1 = build_call_expr_loc (loc,
12243 expfn, 1,
12244 fold_convert_loc (loc, type, arg));
12245 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12248 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12249 if (fcode1 == BUILT_IN_POW
12250 || fcode1 == BUILT_IN_POWF
12251 || fcode1 == BUILT_IN_POWL)
12253 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12254 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12255 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12256 tree neg11 = fold_convert_loc (loc, type,
12257 negate_expr (arg11));
12258 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12259 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
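/* For example, x / pow (y, 2.0) becomes x * pow (y, -2.0); as in the
   expN case above, a division is traded for a multiplication.  */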
12262 return NULL_TREE;
12264 case TRUNC_DIV_EXPR:
12265 /* Optimize (X & (-A)) / A where A is a power of 2,
12266 to X >> log2(A). */
12267 if (TREE_CODE (arg0) == BIT_AND_EXPR
12268 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12269 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12271 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12272 arg1, TREE_OPERAND (arg0, 1));
12273 if (sum && integer_zerop (sum)) {
12274 unsigned long pow2;
12276 if (TREE_INT_CST_LOW (arg1))
12277 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12278 else
12279 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12280 + HOST_BITS_PER_WIDE_INT;
12282 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12283 TREE_OPERAND (arg0, 0),
12284 build_int_cst (integer_type_node, pow2));
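/* Worked example: with A == 8, (X & -8) / 8 folds to X >> 3.  The mask
   guarantees the dividend is an exact multiple of 8, so the arithmetic
   right shift computes the quotient exactly, even for negative X.  */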
12288 /* Fall through */
12290 case FLOOR_DIV_EXPR:
12291 /* Simplify A / (B << N) where A and B are positive and B is
12292 a power of 2, to A >> (N + log2(B)). */
12293 strict_overflow_p = false;
12294 if (TREE_CODE (arg1) == LSHIFT_EXPR
12295 && (TYPE_UNSIGNED (type)
12296 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12298 tree sval = TREE_OPERAND (arg1, 0);
12299 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12301 tree sh_cnt = TREE_OPERAND (arg1, 1);
12302 unsigned long pow2;
12304 if (TREE_INT_CST_LOW (sval))
12305 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12306 else
12307 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12308 + HOST_BITS_PER_WIDE_INT;
12310 if (strict_overflow_p)
12311 fold_overflow_warning (("assuming signed overflow does not "
12312 "occur when simplifying A / (B << N)"),
12313 WARN_STRICT_OVERFLOW_MISC);
12315 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12316 sh_cnt,
12317 build_int_cst (TREE_TYPE (sh_cnt),
12318 pow2));
12319 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12320 fold_convert_loc (loc, type, arg0), sh_cnt);
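/* Worked example: with B == 4 (log2(B) == 2), A / (4 << N) folds to
   A >> (N + 2); for N == 3 that is A / 32 == A >> 5, exact because A
   is known to be nonnegative here.  */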
12324 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12325 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12326 if (INTEGRAL_TYPE_P (type)
12327 && TYPE_UNSIGNED (type)
12328 && code == FLOOR_DIV_EXPR)
12329 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12331 /* Fall through */
12333 case ROUND_DIV_EXPR:
12334 case CEIL_DIV_EXPR:
12335 case EXACT_DIV_EXPR:
12336 if (integer_onep (arg1))
12337 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12338 if (integer_zerop (arg1))
12339 return NULL_TREE;
12340 /* X / -1 is -X. */
12341 if (!TYPE_UNSIGNED (type)
12342 && TREE_CODE (arg1) == INTEGER_CST
12343 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12344 && TREE_INT_CST_HIGH (arg1) == -1)
12345 return fold_convert_loc (loc, type, negate_expr (arg0));
12347 /* Convert -A / -B to A / B when the type is signed and overflow is
12348 undefined. */
12349 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12350 && TREE_CODE (arg0) == NEGATE_EXPR
12351 && negate_expr_p (arg1))
12353 if (INTEGRAL_TYPE_P (type))
12354 fold_overflow_warning (("assuming signed overflow does not occur "
12355 "when distributing negation across "
12356 "division"),
12357 WARN_STRICT_OVERFLOW_MISC);
12358 return fold_build2_loc (loc, code, type,
12359 fold_convert_loc (loc, type,
12360 TREE_OPERAND (arg0, 0)),
12361 fold_convert_loc (loc, type,
12362 negate_expr (arg1)));
12364 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12365 && TREE_CODE (arg1) == NEGATE_EXPR
12366 && negate_expr_p (arg0))
12368 if (INTEGRAL_TYPE_P (type))
12369 fold_overflow_warning (("assuming signed overflow does not occur "
12370 "when distributing negation across "
12371 "division"),
12372 WARN_STRICT_OVERFLOW_MISC);
12373 return fold_build2_loc (loc, code, type,
12374 fold_convert_loc (loc, type,
12375 negate_expr (arg0)),
12376 fold_convert_loc (loc, type,
12377 TREE_OPERAND (arg1, 0)));
12380 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12381 operation, EXACT_DIV_EXPR.
12383 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12384 At one time others generated faster code, but it's not clear whether
12385 they still do after the last round of changes to the DIV code in expmed.c. */
12386 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12387 && multiple_of_p (type, arg0, arg1))
12388 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12390 strict_overflow_p = false;
12391 if (TREE_CODE (arg1) == INTEGER_CST
12392 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12393 &strict_overflow_p)))
12395 if (strict_overflow_p)
12396 fold_overflow_warning (("assuming signed overflow does not occur "
12397 "when simplifying division"),
12398 WARN_STRICT_OVERFLOW_MISC);
12399 return fold_convert_loc (loc, type, tem);
12402 return NULL_TREE;
12404 case CEIL_MOD_EXPR:
12405 case FLOOR_MOD_EXPR:
12406 case ROUND_MOD_EXPR:
12407 case TRUNC_MOD_EXPR:
12408 /* X % 1 is always zero, but be sure to preserve any side
12409 effects in X. */
12410 if (integer_onep (arg1))
12411 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12413 /* For X % 0, return X % 0 unchanged so that we can emit the
12414 proper warnings and errors. */
12415 if (integer_zerop (arg1))
12416 return NULL_TREE;
12418 /* 0 % X is always zero, but be sure to preserve any side
12419 effects in X. Place this after checking for X == 0. */
12420 if (integer_zerop (arg0))
12421 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12423 /* X % -1 is zero. */
12424 if (!TYPE_UNSIGNED (type)
12425 && TREE_CODE (arg1) == INTEGER_CST
12426 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12427 && TREE_INT_CST_HIGH (arg1) == -1)
12428 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12430 /* X % -C is the same as X % C. */
12431 if (code == TRUNC_MOD_EXPR
12432 && !TYPE_UNSIGNED (type)
12433 && TREE_CODE (arg1) == INTEGER_CST
12434 && !TREE_OVERFLOW (arg1)
12435 && TREE_INT_CST_HIGH (arg1) < 0
12436 && !TYPE_OVERFLOW_TRAPS (type)
12437 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12438 && !sign_bit_p (arg1, arg1))
12439 return fold_build2_loc (loc, code, type,
12440 fold_convert_loc (loc, type, arg0),
12441 fold_convert_loc (loc, type,
12442 negate_expr (arg1)));
12444 /* X % -Y is the same as X % Y. */
12445 if (code == TRUNC_MOD_EXPR
12446 && !TYPE_UNSIGNED (type)
12447 && TREE_CODE (arg1) == NEGATE_EXPR
12448 && !TYPE_OVERFLOW_TRAPS (type))
12449 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12450 fold_convert_loc (loc, type,
12451 TREE_OPERAND (arg1, 0)));
12453 strict_overflow_p = false;
12454 if (TREE_CODE (arg1) == INTEGER_CST
12455 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12456 &strict_overflow_p)))
12458 if (strict_overflow_p)
12459 fold_overflow_warning (("assuming signed overflow does not occur "
12460 "when simplifying modulus"),
12461 WARN_STRICT_OVERFLOW_MISC);
12462 return fold_convert_loc (loc, type, tem);
12465 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12466 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12467 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12468 && (TYPE_UNSIGNED (type)
12469 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12471 tree c = arg1;
12472 /* Also optimize A % (C << N) where C is a power of 2,
12473 to A & ((C << N) - 1). */
12474 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12475 c = TREE_OPERAND (arg1, 0);
12477 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12479 tree mask
12480 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12481 build_int_cst (TREE_TYPE (arg1), 1));
12482 if (strict_overflow_p)
12483 fold_overflow_warning (("assuming signed overflow does not "
12484 "occur when simplifying "
12485 "X % (power of two)"),
12486 WARN_STRICT_OVERFLOW_MISC);
12487 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12488 fold_convert_loc (loc, type, arg0),
12489 fold_convert_loc (loc, type, mask));
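/* Worked example: for nonnegative X, X % 8 folds to X & 7
   (29 % 8 == 5 == 29 & 7); in the LSHIFT form, X % (2 << 3) folds to
   X & 15.  */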
12493 return NULL_TREE;
12495 case LROTATE_EXPR:
12496 case RROTATE_EXPR:
12497 if (integer_all_onesp (arg0))
12498 return omit_one_operand_loc (loc, type, arg0, arg1);
12499 goto shift;
12501 case RSHIFT_EXPR:
12502 /* Optimize -1 >> x for arithmetic right shifts. */
12503 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12504 && tree_expr_nonnegative_p (arg1))
12505 return omit_one_operand_loc (loc, type, arg0, arg1);
12506 /* ... fall through ... */
12508 case LSHIFT_EXPR:
12509 shift:
12510 if (integer_zerop (arg1))
12511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12512 if (integer_zerop (arg0))
12513 return omit_one_operand_loc (loc, type, arg0, arg1);
12515 /* Prefer vector1 << scalar to vector1 << vector2
12516 if vector2 is uniform. */
12517 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12518 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12519 return fold_build2_loc (loc, code, type, op0, tem);
12521 /* Since a negative shift count is not well-defined,
12522 don't try to compute the result in the compiler. */
12523 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12524 return NULL_TREE;
12526 prec = element_precision (type);
12528 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12529 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12530 && TREE_INT_CST_LOW (arg1) < prec
12531 && host_integerp (TREE_OPERAND (arg0, 1), true)
12532 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12534 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12535 + TREE_INT_CST_LOW (arg1));
12537 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12538 being well defined. */
12539 if (low >= prec)
12541 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12542 low = low % prec;
12543 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12544 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12545 TREE_OPERAND (arg0, 0));
12546 else
12547 low = prec - 1;
12550 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12551 build_int_cst (TREE_TYPE (arg1), low));
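/* Worked example: (x << 3) << 4 folds to x << 7 when 7 < prec; on a
   32-bit unsigned type, (x << 20) << 20 folds to zero via the
   low >= prec branch above, matching what the two shifts actually
   compute.  */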
12554 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12555 into x & ((unsigned)-1 >> c) for unsigned types. */
12556 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12557 || (TYPE_UNSIGNED (type)
12558 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12559 && host_integerp (arg1, false)
12560 && TREE_INT_CST_LOW (arg1) < prec
12561 && host_integerp (TREE_OPERAND (arg0, 1), false)
12562 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12564 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12565 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12566 tree lshift;
12567 tree arg00;
12569 if (low0 == low1)
12571 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12573 lshift = build_minus_one_cst (type);
12574 lshift = const_binop (code, lshift, arg1);
12576 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
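/* Worked example: with c == 4, (x >> 4) << 4 folds to x & (-1 << 4),
   i.e. x with its low four bits cleared; on a 32-bit type the mask is
   0xfffffff0.  */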
12580 /* Rewrite an LROTATE_EXPR by a constant into an
12581 RROTATE_EXPR by a new constant. */
12582 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12584 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12585 tem = const_binop (MINUS_EXPR, tem, arg1);
12586 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
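/* For example, on a 32-bit type a rotate-left by 8 becomes a
   rotate-right by 24, since prec - 8 == 24.  */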
12589 /* If we have a rotate of a bit operation with the rotate count and
12590 the second operand of the bit operation both constant,
12591 permute the two operations. */
12592 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12593 && (TREE_CODE (arg0) == BIT_AND_EXPR
12594 || TREE_CODE (arg0) == BIT_IOR_EXPR
12595 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12597 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12598 fold_build2_loc (loc, code, type,
12599 TREE_OPERAND (arg0, 0), arg1),
12600 fold_build2_loc (loc, code, type,
12601 TREE_OPERAND (arg0, 1), arg1));
12603 /* Two consecutive rotates adding up to the precision of the
12604 type can be ignored. */
12605 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12606 && TREE_CODE (arg0) == RROTATE_EXPR
12607 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12608 && TREE_INT_CST_HIGH (arg1) == 0
12609 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12610 && ((TREE_INT_CST_LOW (arg1)
12611 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12612 == prec))
12613 return TREE_OPERAND (arg0, 0);
12615 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12616 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12617 if the latter can be further optimized. */
12618 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12619 && TREE_CODE (arg0) == BIT_AND_EXPR
12620 && TREE_CODE (arg1) == INTEGER_CST
12621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12623 tree mask = fold_build2_loc (loc, code, type,
12624 fold_convert_loc (loc, type,
12625 TREE_OPERAND (arg0, 1)),
12626 arg1);
12627 tree shift = fold_build2_loc (loc, code, type,
12628 fold_convert_loc (loc, type,
12629 TREE_OPERAND (arg0, 0)),
12630 arg1);
12631 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12632 if (tem)
12633 return tem;
12636 return NULL_TREE;
12638 case MIN_EXPR:
12639 if (operand_equal_p (arg0, arg1, 0))
12640 return omit_one_operand_loc (loc, type, arg0, arg1);
12641 if (INTEGRAL_TYPE_P (type)
12642 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12643 return omit_one_operand_loc (loc, type, arg1, arg0);
12644 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12645 if (tem)
12646 return tem;
12647 goto associate;
12649 case MAX_EXPR:
12650 if (operand_equal_p (arg0, arg1, 0))
12651 return omit_one_operand_loc (loc, type, arg0, arg1);
12652 if (INTEGRAL_TYPE_P (type)
12653 && TYPE_MAX_VALUE (type)
12654 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12655 return omit_one_operand_loc (loc, type, arg1, arg0);
12656 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12657 if (tem)
12658 return tem;
12659 goto associate;
12661 case TRUTH_ANDIF_EXPR:
12662 /* Note that the operands of this must be ints
12663 and their values must be 0 or 1.
12664 ("true" is a fixed value perhaps depending on the language.) */
12665 /* If first arg is constant zero, return it. */
12666 if (integer_zerop (arg0))
12667 return fold_convert_loc (loc, type, arg0);
12668 case TRUTH_AND_EXPR:
12669 /* If either arg is constant true, drop it. */
12670 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12671 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12672 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12673 /* Preserve sequence points. */
12674 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12675 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12676 /* If second arg is constant zero, result is zero, but first arg
12677 must be evaluated. */
12678 if (integer_zerop (arg1))
12679 return omit_one_operand_loc (loc, type, arg1, arg0);
12680 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12681 case will be handled here. */
12682 if (integer_zerop (arg0))
12683 return omit_one_operand_loc (loc, type, arg0, arg1);
12685 /* !X && X is always false. */
12686 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12688 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12689 /* X && !X is always false. */
12690 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12694 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12695 means A >= Y && A != MAX, but in this case we know that
12696 A < X <= MAX. */
12698 if (!TREE_SIDE_EFFECTS (arg0)
12699 && !TREE_SIDE_EFFECTS (arg1))
12701 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12702 if (tem && !operand_equal_p (tem, arg0, 0))
12703 return fold_build2_loc (loc, code, type, tem, arg1);
12705 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12706 if (tem && !operand_equal_p (tem, arg1, 0))
12707 return fold_build2_loc (loc, code, type, arg0, tem);
12710 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12711 != NULL_TREE)
12712 return tem;
12714 return NULL_TREE;
12716 case TRUTH_ORIF_EXPR:
12717 /* Note that the operands of this must be ints
12718 and their values must be 0 or true.
12719 ("true" is a fixed value perhaps depending on the language.) */
12720 /* If first arg is constant true, return it. */
12721 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12722 return fold_convert_loc (loc, type, arg0);
12723 case TRUTH_OR_EXPR:
12724 /* If either arg is constant zero, drop it. */
12725 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12726 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12727 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12728 /* Preserve sequence points. */
12729 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12730 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12731 /* If second arg is constant true, result is true, but we must
12732 evaluate first arg. */
12733 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12734 return omit_one_operand_loc (loc, type, arg1, arg0);
12735 /* Likewise for first arg, but note this only occurs here for
12736 TRUTH_OR_EXPR. */
12737 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12738 return omit_one_operand_loc (loc, type, arg0, arg1);
12740 /* !X || X is always true. */
12741 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12742 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12743 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12744 /* X || !X is always true. */
12745 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12747 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12749 /* (X && !Y) || (!X && Y) is X ^ Y */
12750 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12751 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12753 tree a0, a1, l0, l1, n0, n1;
12755 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12756 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12758 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12759 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12761 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12762 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12764 if ((operand_equal_p (n0, a0, 0)
12765 && operand_equal_p (n1, a1, 0))
12766 || (operand_equal_p (n0, a1, 0)
12767 && operand_equal_p (n1, a0, 0)))
12768 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
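/* Checking the truth table: (X && !Y) || (!X && Y) is true exactly
   when X and Y differ, which is precisely TRUTH_XOR_EXPR.  */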
12771 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12772 != NULL_TREE)
12773 return tem;
12775 return NULL_TREE;
12777 case TRUTH_XOR_EXPR:
12778 /* If the second arg is constant zero, drop it. */
12779 if (integer_zerop (arg1))
12780 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12781 /* If the second arg is constant true, this is a logical inversion. */
12782 if (integer_onep (arg1))
12784 tem = invert_truthvalue_loc (loc, arg0);
12785 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12787 /* Identical arguments cancel to zero. */
12788 if (operand_equal_p (arg0, arg1, 0))
12789 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12791 /* !X ^ X is always true. */
12792 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12793 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12794 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12796 /* X ^ !X is always true. */
12797 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12798 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12799 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12801 return NULL_TREE;
12803 case EQ_EXPR:
12804 case NE_EXPR:
12805 STRIP_NOPS (arg0);
12806 STRIP_NOPS (arg1);
12808 tem = fold_comparison (loc, code, type, op0, op1);
12809 if (tem != NULL_TREE)
12810 return tem;
12812 /* bool_var != 0 becomes bool_var. */
12813 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12814 && code == NE_EXPR)
12815 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12817 /* bool_var == 1 becomes bool_var. */
12818 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12819 && code == EQ_EXPR)
12820 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12822 /* bool_var != 1 becomes !bool_var. */
12823 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12824 && code == NE_EXPR)
12825 return fold_convert_loc (loc, type,
12826 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12827 TREE_TYPE (arg0), arg0));
12829 /* bool_var == 0 becomes !bool_var. */
12830 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12831 && code == EQ_EXPR)
12832 return fold_convert_loc (loc, type,
12833 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12834 TREE_TYPE (arg0), arg0));
12836 /* !exp != 0 becomes !exp. */
12837 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12838 && code == NE_EXPR)
12839 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12841 /* If this is an equality comparison of the address of two non-weak,
12842 unaliased symbols, neither of which is extern (since we do not
12843 have access to attributes for externs), then we know the result. */
12844 if (TREE_CODE (arg0) == ADDR_EXPR
12845 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12846 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12847 && ! lookup_attribute ("alias",
12848 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12849 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12850 && TREE_CODE (arg1) == ADDR_EXPR
12851 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12852 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12853 && ! lookup_attribute ("alias",
12854 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12855 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12857 /* We know that we're looking at the address of two
12858 non-weak, unaliased, static _DECL nodes.
12860 It is both wasteful and incorrect to call operand_equal_p
12861 to compare the two ADDR_EXPR nodes. It is wasteful in that
12862 all we need to do is test pointer equality for the arguments
12863 to the two ADDR_EXPR nodes. It is incorrect to use
12864 operand_equal_p as that function is NOT equivalent to a
12865 C equality test. It can in fact return false for two
12866 objects which would test as equal using the C equality
12867 operator. */
12868 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12869 return constant_boolean_node (equal
12870 ? code == EQ_EXPR : code != EQ_EXPR,
12871 type);
12874 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12875 a MINUS_EXPR of a constant, we can convert it into a comparison with
12876 a revised constant as long as no overflow occurs. */
12877 if (TREE_CODE (arg1) == INTEGER_CST
12878 && (TREE_CODE (arg0) == PLUS_EXPR
12879 || TREE_CODE (arg0) == MINUS_EXPR)
12880 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12881 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12882 ? MINUS_EXPR : PLUS_EXPR,
12883 fold_convert_loc (loc, TREE_TYPE (arg0),
12884 arg1),
12885 TREE_OPERAND (arg0, 1)))
12886 && !TREE_OVERFLOW (tem))
12887 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12889 /* Similarly for a NEGATE_EXPR. */
12890 if (TREE_CODE (arg0) == NEGATE_EXPR
12891 && TREE_CODE (arg1) == INTEGER_CST
12892 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12893 arg1)))
12894 && TREE_CODE (tem) == INTEGER_CST
12895 && !TREE_OVERFLOW (tem))
12896 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12898 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12899 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12900 && TREE_CODE (arg1) == INTEGER_CST
12901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12902 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12903 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12904 fold_convert_loc (loc,
12905 TREE_TYPE (arg0),
12906 arg1),
12907 TREE_OPERAND (arg0, 1)));
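/* Worked example: (X ^ 5) == 3 folds to X == 6, since xoring both
   sides with 5 gives X == (5 ^ 3) == 6.  */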
12909 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12910 if ((TREE_CODE (arg0) == PLUS_EXPR
12911 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12912 || TREE_CODE (arg0) == MINUS_EXPR)
12913 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12914 0)),
12915 arg1, 0)
12916 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12917 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12919 tree val = TREE_OPERAND (arg0, 1);
12920 return omit_two_operands_loc (loc, type,
12921 fold_build2_loc (loc, code, type,
12922 val,
12923 build_int_cst (TREE_TYPE (val),
12924 0)),
12925 TREE_OPERAND (arg0, 0), arg1);
12928 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12929 if (TREE_CODE (arg0) == MINUS_EXPR
12930 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12931 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12932 1)),
12933 arg1, 0)
12934 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12936 return omit_two_operands_loc (loc, type,
12937 code == NE_EXPR
12938 ? boolean_true_node : boolean_false_node,
12939 TREE_OPERAND (arg0, 1), arg1);
12942 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12943 for !=. Don't do this for ordered comparisons due to overflow. */
12944 if (TREE_CODE (arg0) == MINUS_EXPR
12945 && integer_zerop (arg1))
12946 return fold_build2_loc (loc, code, type,
12947 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12949 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12950 if (TREE_CODE (arg0) == ABS_EXPR
12951 && (integer_zerop (arg1) || real_zerop (arg1)))
12952 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12954 /* If this is an EQ or NE comparison with zero and ARG0 is
12955 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12956 two operations, but the latter can be done in one less insn
12957 on machines that have only two-operand insns or on which a
12958 constant cannot be the first operand. */
12959 if (TREE_CODE (arg0) == BIT_AND_EXPR
12960 && integer_zerop (arg1))
12962 tree arg00 = TREE_OPERAND (arg0, 0);
12963 tree arg01 = TREE_OPERAND (arg0, 1);
12964 if (TREE_CODE (arg00) == LSHIFT_EXPR
12965 && integer_onep (TREE_OPERAND (arg00, 0)))
12967 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12968 arg01, TREE_OPERAND (arg00, 1));
12969 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12970 build_int_cst (TREE_TYPE (arg0), 1));
12971 return fold_build2_loc (loc, code, type,
12972 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12973 arg1);
12975 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12976 && integer_onep (TREE_OPERAND (arg01, 0)))
12978 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12979 arg00, TREE_OPERAND (arg01, 1));
12980 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12981 build_int_cst (TREE_TYPE (arg0), 1));
12982 return fold_build2_loc (loc, code, type,
12983 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12984 arg1);
12988 /* If this is an NE or EQ comparison of zero against the result of a
12989 signed MOD operation whose second operand is a power of 2, make
12990 the MOD operation unsigned since it is simpler and equivalent. */
12991 if (integer_zerop (arg1)
12992 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12993 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12994 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12995 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12996 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12997 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12999 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13000 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13001 fold_convert_loc (loc, newtype,
13002 TREE_OPERAND (arg0, 0)),
13003 fold_convert_loc (loc, newtype,
13004 TREE_OPERAND (arg0, 1)));
13006 return fold_build2_loc (loc, code, type, newmod,
13007 fold_convert_loc (loc, newtype, arg1));
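/* Worked example: for 32-bit int X, X % 4 == 0 becomes
   (unsigned) X % 4U == 0.  The two agree for every X (e.g. X == -5
   gives remainders -1 and 3, both nonzero) because 2^32 is itself a
   multiple of 4.  */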
13010 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13011 C1 is a valid shift constant, and C2 is a power of two, i.e.
13012 a single bit. */
13013 if (TREE_CODE (arg0) == BIT_AND_EXPR
13014 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13015 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13016 == INTEGER_CST
13017 && integer_pow2p (TREE_OPERAND (arg0, 1))
13018 && integer_zerop (arg1))
13020 tree itype = TREE_TYPE (arg0);
13021 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13022 prec = TYPE_PRECISION (itype);
13024 /* Check for a valid shift count. */
13025 if (TREE_INT_CST_HIGH (arg001) == 0
13026 && TREE_INT_CST_LOW (arg001) < prec)
13028 tree arg01 = TREE_OPERAND (arg0, 1);
13029 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13030 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13031 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13032 can be rewritten as (X & (C2 << C1)) != 0. */
13033 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13035 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13036 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13037 return fold_build2_loc (loc, code, type, tem,
13038 fold_convert_loc (loc, itype, arg1));
13040 /* Otherwise, for signed (arithmetic) shifts,
13041 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13042 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13043 else if (!TYPE_UNSIGNED (itype))
13044 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13045 arg000, build_int_cst (itype, 0));
13046 /* Otherwise, for unsigned (logical) shifts,
13047 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13048 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13049 else
13050 return omit_one_operand_loc (loc, type,
13051 code == EQ_EXPR ? integer_one_node
13052 : integer_zero_node,
13053 arg000);
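/* Worked examples: ((X >> 3) & 4) != 0 folds to (X & 32) != 0, since
   4 << 3 == 32 still fits in the precision.  When C2 << C1 would not
   fit, the tested bit of an arithmetic shift is necessarily a copy of
   the sign bit, hence the X < 0 / X >= 0 rewrites just above.  */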
13057 /* If we have (A & C) == C where C is a power of 2, convert this into
13058 (A & C) != 0. Similarly for NE_EXPR. */
13059 if (TREE_CODE (arg0) == BIT_AND_EXPR
13060 && integer_pow2p (TREE_OPERAND (arg0, 1))
13061 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13062 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13063 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13064 integer_zero_node));
13066 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13067 bit, then fold the expression into A < 0 or A >= 0. */
13068 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13069 if (tem)
13070 return tem;
13072 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13073 Similarly for NE_EXPR. */
13074 if (TREE_CODE (arg0) == BIT_AND_EXPR
13075 && TREE_CODE (arg1) == INTEGER_CST
13076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13078 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13079 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13080 TREE_OPERAND (arg0, 1));
13081 tree dandnotc
13082 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13083 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13084 notc);
13085 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13086 if (integer_nonzerop (dandnotc))
13087 return omit_one_operand_loc (loc, type, rslt, arg0);
13090 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13091 Similarly for NE_EXPR. */
13092 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13093 && TREE_CODE (arg1) == INTEGER_CST
13094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13096 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13097 tree candnotd
13098 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13099 TREE_OPERAND (arg0, 1),
13100 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13101 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13102 if (integer_nonzerop (candnotd))
13103 return omit_one_operand_loc (loc, type, rslt, arg0);
13106 /* If this is a comparison of a field, we may be able to simplify it. */
13107 if ((TREE_CODE (arg0) == COMPONENT_REF
13108 || TREE_CODE (arg0) == BIT_FIELD_REF)
13109 /* Handle the constant case even without -O
13110 to make sure the warnings are given. */
13111 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13113 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13114 if (t1)
13115 return t1;
13118 /* Optimize comparisons of strlen vs zero to a compare of the
13119 first character of the string vs zero. To wit,
13120 strlen(ptr) == 0 => *ptr == 0
13121 strlen(ptr) != 0 => *ptr != 0
13122 Other cases should reduce to one of these two (or a constant)
13123 due to the return value of strlen being unsigned. */
13124 if (TREE_CODE (arg0) == CALL_EXPR
13125 && integer_zerop (arg1))
13127 tree fndecl = get_callee_fndecl (arg0);
13129 if (fndecl
13130 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13131 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13132 && call_expr_nargs (arg0) == 1
13133 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13135 tree iref = build_fold_indirect_ref_loc (loc,
13136 CALL_EXPR_ARG (arg0, 0));
13137 return fold_build2_loc (loc, code, type, iref,
13138 build_int_cst (TREE_TYPE (iref), 0));
13142 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13143 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13144 if (TREE_CODE (arg0) == RSHIFT_EXPR
13145 && integer_zerop (arg1)
13146 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13148 tree arg00 = TREE_OPERAND (arg0, 0);
13149 tree arg01 = TREE_OPERAND (arg0, 1);
13150 tree itype = TREE_TYPE (arg00);
13151 if (TREE_INT_CST_HIGH (arg01) == 0
13152 && TREE_INT_CST_LOW (arg01)
13153 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13155 if (TYPE_UNSIGNED (itype))
13157 itype = signed_type_for (itype);
13158 arg00 = fold_convert_loc (loc, itype, arg00);
13160 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13161 type, arg00, build_zero_cst (itype));
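/* Worked example: for 32-bit int X, (X >> 31) != 0 folds to X < 0,
   since the arithmetic shift yields either all zeros or all ones; an
   unsigned X is first converted to the signed type so the same sign
   test applies.  */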
13165 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13166 if (integer_zerop (arg1)
13167 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13168 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13169 TREE_OPERAND (arg0, 1));
13171 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13172 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13173 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13174 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13175 build_zero_cst (TREE_TYPE (arg0)));
13176 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13177 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13178 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13179 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13180 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13181 build_zero_cst (TREE_TYPE (arg0)));
13183 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13184 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13185 && TREE_CODE (arg1) == INTEGER_CST
13186 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13187 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13188 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13189 TREE_OPERAND (arg0, 1), arg1));
13191 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13192 (X & C) == 0 when C is a single bit. */
13193 if (TREE_CODE (arg0) == BIT_AND_EXPR
13194 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13195 && integer_zerop (arg1)
13196 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13198 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13199 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13200 TREE_OPERAND (arg0, 1));
13201 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13202 type, tem,
13203 fold_convert_loc (loc, TREE_TYPE (arg0),
13204 arg1));
13207 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13208 constant C is a power of two, i.e. a single bit. */
13209 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13210 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13211 && integer_zerop (arg1)
13212 && integer_pow2p (TREE_OPERAND (arg0, 1))
13213 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13214 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13216 tree arg00 = TREE_OPERAND (arg0, 0);
13217 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13218 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13221 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13222 when C is a power of two, i.e. a single bit. */
13223 if (TREE_CODE (arg0) == BIT_AND_EXPR
13224 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13225 && integer_zerop (arg1)
13226 && integer_pow2p (TREE_OPERAND (arg0, 1))
13227 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13228 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13230 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13231 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13232 arg000, TREE_OPERAND (arg0, 1));
13233 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13234 tem, build_int_cst (TREE_TYPE (tem), 0));
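/* Worked example: ((X ^ 8) & 8) != 0 folds to (X & 8) == 0; the XOR
   flips exactly the bit being tested.  */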
13237 if (integer_zerop (arg1)
13238 && tree_expr_nonzero_p (arg0))
13240 tree res = constant_boolean_node (code==NE_EXPR, type);
13241 return omit_one_operand_loc (loc, type, res, arg0);
13244 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13245 if (TREE_CODE (arg0) == NEGATE_EXPR
13246 && TREE_CODE (arg1) == NEGATE_EXPR)
13247 return fold_build2_loc (loc, code, type,
13248 TREE_OPERAND (arg0, 0),
13249 fold_convert_loc (loc, TREE_TYPE (arg0),
13250 TREE_OPERAND (arg1, 0)));
13252 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13253 if (TREE_CODE (arg0) == BIT_AND_EXPR
13254 && TREE_CODE (arg1) == BIT_AND_EXPR)
13256 tree arg00 = TREE_OPERAND (arg0, 0);
13257 tree arg01 = TREE_OPERAND (arg0, 1);
13258 tree arg10 = TREE_OPERAND (arg1, 0);
13259 tree arg11 = TREE_OPERAND (arg1, 1);
13260 tree itype = TREE_TYPE (arg0);
13262 if (operand_equal_p (arg01, arg11, 0))
13263 return fold_build2_loc (loc, code, type,
13264 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13265 fold_build2_loc (loc,
13266 BIT_XOR_EXPR, itype,
13267 arg00, arg10),
13268 arg01),
13269 build_zero_cst (itype));
13271 if (operand_equal_p (arg01, arg10, 0))
13272 return fold_build2_loc (loc, code, type,
13273 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13274 fold_build2_loc (loc,
13275 BIT_XOR_EXPR, itype,
13276 arg00, arg11),
13277 arg01),
13278 build_zero_cst (itype));
13280 if (operand_equal_p (arg00, arg11, 0))
13281 return fold_build2_loc (loc, code, type,
13282 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13283 fold_build2_loc (loc,
13284 BIT_XOR_EXPR, itype,
13285 arg01, arg10),
13286 arg00),
13287 build_zero_cst (itype));
13289 if (operand_equal_p (arg00, arg10, 0))
13290 return fold_build2_loc (loc, code, type,
13291 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13292 fold_build2_loc (loc,
13293 BIT_XOR_EXPR, itype,
13294 arg01, arg11),
13295 arg00),
13296 build_zero_cst (itype));
13299 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13300 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13302 tree arg00 = TREE_OPERAND (arg0, 0);
13303 tree arg01 = TREE_OPERAND (arg0, 1);
13304 tree arg10 = TREE_OPERAND (arg1, 0);
13305 tree arg11 = TREE_OPERAND (arg1, 1);
13306 tree itype = TREE_TYPE (arg0);
13308 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13309 operand_equal_p guarantees no side-effects so we don't need
13310 to use omit_one_operand on Z. */
13311 if (operand_equal_p (arg01, arg11, 0))
13312 return fold_build2_loc (loc, code, type, arg00,
13313 fold_convert_loc (loc, TREE_TYPE (arg00),
13314 arg10));
13315 if (operand_equal_p (arg01, arg10, 0))
13316 return fold_build2_loc (loc, code, type, arg00,
13317 fold_convert_loc (loc, TREE_TYPE (arg00),
13318 arg11));
13319 if (operand_equal_p (arg00, arg11, 0))
13320 return fold_build2_loc (loc, code, type, arg01,
13321 fold_convert_loc (loc, TREE_TYPE (arg01),
13322 arg10));
13323 if (operand_equal_p (arg00, arg10, 0))
13324 return fold_build2_loc (loc, code, type, arg01,
13325 fold_convert_loc (loc, TREE_TYPE (arg01),
13326 arg11));
13328 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13329 if (TREE_CODE (arg01) == INTEGER_CST
13330 && TREE_CODE (arg11) == INTEGER_CST)
13332 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13333 fold_convert_loc (loc, itype, arg11));
13334 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13335 return fold_build2_loc (loc, code, type, tem,
13336 fold_convert_loc (loc, itype, arg10));
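/* Worked example: (X ^ 3) == (Y ^ 5) folds to (X ^ 6) == Y; xoring
   both sides with 5 leaves Y on the right and X ^ (3 ^ 5) on the
   left.  */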
13340 /* Attempt to simplify equality/inequality comparisons of complex
13341 values. Only lower the comparison if the result is known or
13342 can be simplified to a single scalar comparison. */
13343 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13344 || TREE_CODE (arg0) == COMPLEX_CST)
13345 && (TREE_CODE (arg1) == COMPLEX_EXPR
13346 || TREE_CODE (arg1) == COMPLEX_CST))
13348 tree real0, imag0, real1, imag1;
13349 tree rcond, icond;
13351 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13353 real0 = TREE_OPERAND (arg0, 0);
13354 imag0 = TREE_OPERAND (arg0, 1);
13356 else
13358 real0 = TREE_REALPART (arg0);
13359 imag0 = TREE_IMAGPART (arg0);
13362 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13364 real1 = TREE_OPERAND (arg1, 0);
13365 imag1 = TREE_OPERAND (arg1, 1);
13367 else
13369 real1 = TREE_REALPART (arg1);
13370 imag1 = TREE_IMAGPART (arg1);
13373 rcond = fold_binary_loc (loc, code, type, real0, real1);
13374 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13376 if (integer_zerop (rcond))
13378 if (code == EQ_EXPR)
13379 return omit_two_operands_loc (loc, type, boolean_false_node,
13380 imag0, imag1);
13381 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13383 else
13385 if (code == NE_EXPR)
13386 return omit_two_operands_loc (loc, type, boolean_true_node,
13387 imag0, imag1);
13388 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13392 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13393 if (icond && TREE_CODE (icond) == INTEGER_CST)
13395 if (integer_zerop (icond))
13397 if (code == EQ_EXPR)
13398 return omit_two_operands_loc (loc, type, boolean_false_node,
13399 real0, real1);
13400 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13402 else
13404 if (code == NE_EXPR)
13405 return omit_two_operands_loc (loc, type, boolean_true_node,
13406 real0, real1);
13407 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13412 return NULL_TREE;
13414 case LT_EXPR:
13415 case GT_EXPR:
13416 case LE_EXPR:
13417 case GE_EXPR:
13418 tem = fold_comparison (loc, code, type, op0, op1);
13419 if (tem != NULL_TREE)
13420 return tem;
13422 /* Transform comparisons of the form X +- C CMP X. */
13423 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13425 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13426 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13427 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13428 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13430 tree arg01 = TREE_OPERAND (arg0, 1);
13431 enum tree_code code0 = TREE_CODE (arg0);
13432 int is_positive;
13434 if (TREE_CODE (arg01) == REAL_CST)
13435 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13436 else
13437 is_positive = tree_int_cst_sgn (arg01);
13439 /* (X - c) > X becomes false. */
13440 if (code == GT_EXPR
13441 && ((code0 == MINUS_EXPR && is_positive >= 0)
13442 || (code0 == PLUS_EXPR && is_positive <= 0)))
13444 if (TREE_CODE (arg01) == INTEGER_CST
13445 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13446 fold_overflow_warning (("assuming signed overflow does not "
13447 "occur when assuming that (X - c) > X "
13448 "is always false"),
13449 WARN_STRICT_OVERFLOW_ALL);
13450 return constant_boolean_node (0, type);
13453 /* Likewise (X + c) < X becomes false. */
13454 if (code == LT_EXPR
13455 && ((code0 == PLUS_EXPR && is_positive >= 0)
13456 || (code0 == MINUS_EXPR && is_positive <= 0)))
13458 if (TREE_CODE (arg01) == INTEGER_CST
13459 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13460 fold_overflow_warning (("assuming signed overflow does not "
13461 "occur when assuming that "
13462 "(X + c) < X is always false"),
13463 WARN_STRICT_OVERFLOW_ALL);
13464 return constant_boolean_node (0, type);
13467 /* Convert (X - c) <= X to true. */
13468 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13469 && code == LE_EXPR
13470 && ((code0 == MINUS_EXPR && is_positive >= 0)
13471 || (code0 == PLUS_EXPR && is_positive <= 0)))
13473 if (TREE_CODE (arg01) == INTEGER_CST
13474 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13475 fold_overflow_warning (("assuming signed overflow does not "
13476 "occur when assuming that "
13477 "(X - c) <= X is always true"),
13478 WARN_STRICT_OVERFLOW_ALL);
13479 return constant_boolean_node (1, type);
13482 /* Convert (X + c) >= X to true. */
13483 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13484 && code == GE_EXPR
13485 && ((code0 == PLUS_EXPR && is_positive >= 0)
13486 || (code0 == MINUS_EXPR && is_positive <= 0)))
13488 if (TREE_CODE (arg01) == INTEGER_CST
13489 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13490 fold_overflow_warning (("assuming signed overflow does not "
13491 "occur when assuming that "
13492 "(X + c) >= X is always true"),
13493 WARN_STRICT_OVERFLOW_ALL);
13494 return constant_boolean_node (1, type);
13497 if (TREE_CODE (arg01) == INTEGER_CST)
13499 /* Convert X + c > X and X - c < X to true for integers. */
13500 if (code == GT_EXPR
13501 && ((code0 == PLUS_EXPR && is_positive > 0)
13502 || (code0 == MINUS_EXPR && is_positive < 0)))
13504 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13505 fold_overflow_warning (("assuming signed overflow does "
13506 "not occur when assuming that "
13507 "(X + c) > X is always true"),
13508 WARN_STRICT_OVERFLOW_ALL);
13509 return constant_boolean_node (1, type);
13512 if (code == LT_EXPR
13513 && ((code0 == MINUS_EXPR && is_positive > 0)
13514 || (code0 == PLUS_EXPR && is_positive < 0)))
13516 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13517 fold_overflow_warning (("assuming signed overflow does "
13518 "not occur when assuming that "
13519 "(X - c) < X is always true"),
13520 WARN_STRICT_OVERFLOW_ALL);
13521 return constant_boolean_node (1, type);
13524 /* Convert X + c <= X and X - c >= X to false for integers. */
13525 if (code == LE_EXPR
13526 && ((code0 == PLUS_EXPR && is_positive > 0)
13527 || (code0 == MINUS_EXPR && is_positive < 0)))
13529 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13530 fold_overflow_warning (("assuming signed overflow does "
13531 "not occur when assuming that "
13532 "(X + c) <= X is always false"),
13533 WARN_STRICT_OVERFLOW_ALL);
13534 return constant_boolean_node (0, type);
13537 if (code == GE_EXPR
13538 && ((code0 == MINUS_EXPR && is_positive > 0)
13539 || (code0 == PLUS_EXPR && is_positive < 0)))
13541 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13542 fold_overflow_warning (("assuming signed overflow does "
13543 "not occur when assuming that "
13544 "(X - c) >= X is always false"),
13545 WARN_STRICT_OVERFLOW_ALL);
13546 return constant_boolean_node (0, type);
13551 /* Comparisons with the highest or lowest possible integer of
13552 the specified precision will have known values. */
13554 tree arg1_type = TREE_TYPE (arg1);
13555 unsigned int width = TYPE_PRECISION (arg1_type);
13557 if (TREE_CODE (arg1) == INTEGER_CST
13558 && width <= HOST_BITS_PER_DOUBLE_INT
13559 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13561 HOST_WIDE_INT signed_max_hi;
13562 unsigned HOST_WIDE_INT signed_max_lo;
13563 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13565 if (width <= HOST_BITS_PER_WIDE_INT)
13567 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13568 - 1;
13569 signed_max_hi = 0;
13570 max_hi = 0;
13572 if (TYPE_UNSIGNED (arg1_type))
13574 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13575 min_lo = 0;
13576 min_hi = 0;
13578 else
13580 max_lo = signed_max_lo;
13581 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13582 min_hi = -1;
13585 else
13587 width -= HOST_BITS_PER_WIDE_INT;
13588 signed_max_lo = -1;
13589 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13590 - 1;
13591 max_lo = -1;
13592 min_lo = 0;
13594 if (TYPE_UNSIGNED (arg1_type))
13596 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13597 min_hi = 0;
13599 else
13601 max_hi = signed_max_hi;
13602 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13606 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13607 && TREE_INT_CST_LOW (arg1) == max_lo)
13608 switch (code)
13610 case GT_EXPR:
13611 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13613 case GE_EXPR:
13614 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13616 case LE_EXPR:
13617 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13619 case LT_EXPR:
13620 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13622 /* The GE_EXPR and LT_EXPR cases above are not normally
13623 reached because of previous transformations. */
13625 default:
13626 break;
13628 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13629 == max_hi
13630 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13631 switch (code)
13633 case GT_EXPR:
13634 arg1 = const_binop (PLUS_EXPR, arg1,
13635 build_int_cst (TREE_TYPE (arg1), 1));
13636 return fold_build2_loc (loc, EQ_EXPR, type,
13637 fold_convert_loc (loc,
13638 TREE_TYPE (arg1), arg0),
13639 arg1);
13640 case LE_EXPR:
13641 arg1 = const_binop (PLUS_EXPR, arg1,
13642 build_int_cst (TREE_TYPE (arg1), 1));
13643 return fold_build2_loc (loc, NE_EXPR, type,
13644 fold_convert_loc (loc, TREE_TYPE (arg1),
13645 arg0),
13646 arg1);
13647 default:
13648 break;
13650 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13651 == min_hi
13652 && TREE_INT_CST_LOW (arg1) == min_lo)
13653 switch (code)
13655 case LT_EXPR:
13656 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13658 case LE_EXPR:
13659 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13661 case GE_EXPR:
13662 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13664 case GT_EXPR:
13665 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13667 default:
13668 break;
13670 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13671 == min_hi
13672 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13673 switch (code)
13675 case GE_EXPR:
13676 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13677 return fold_build2_loc (loc, NE_EXPR, type,
13678 fold_convert_loc (loc,
13679 TREE_TYPE (arg1), arg0),
13680 arg1);
13681 case LT_EXPR:
13682 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13683 return fold_build2_loc (loc, EQ_EXPR, type,
13684 fold_convert_loc (loc, TREE_TYPE (arg1),
13685 arg0),
13686 arg1);
13687 default:
13688 break;
13691 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13692 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13693 && TYPE_UNSIGNED (arg1_type)
13694 /* We will flip the signedness of the comparison operator
13695 associated with the mode of arg1, so the sign bit is
13696 specified by this mode. Check that arg1 is the signed
13697 max associated with this sign bit. */
13698 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13699 /* signed_type does not work on pointer types. */
13700 && INTEGRAL_TYPE_P (arg1_type))
13702 /* The following case also applies to X < signed_max+1
13703 and X >= signed_max+1 because of previous transformations. */
13704 if (code == LE_EXPR || code == GT_EXPR)
13706 tree st;
13707 st = signed_type_for (TREE_TYPE (arg1));
13708 return fold_build2_loc (loc,
13709 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13710 type, fold_convert_loc (loc, st, arg0),
13711 build_int_cst (st, 0));
13717 /* If we are comparing an ABS_EXPR with a constant, we can
13718 convert all the cases into explicit comparisons, but they may
13719 well not be faster than doing the ABS and one comparison.
13720 But ABS (X) <= C is a range comparison, which becomes a subtraction
13721 and a comparison, and is probably faster. */
13722 if (code == LE_EXPR
13723 && TREE_CODE (arg1) == INTEGER_CST
13724 && TREE_CODE (arg0) == ABS_EXPR
13725 && ! TREE_SIDE_EFFECTS (arg0)
13726 && (0 != (tem = negate_expr (arg1)))
13727 && TREE_CODE (tem) == INTEGER_CST
13728 && !TREE_OVERFLOW (tem))
13729 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13730 build2 (GE_EXPR, type,
13731 TREE_OPERAND (arg0, 0), tem),
13732 build2 (LE_EXPR, type,
13733 TREE_OPERAND (arg0, 0), arg1));
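/* Worked example: ABS (X) <= 5 folds to X >= -5 && X <= 5, a range
   test that later folding can reduce to a single unsigned comparison
   such as (unsigned) (X + 5) <= 10.  */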
13735 /* Convert ABS_EXPR<x> >= 0 to true. */
13736 strict_overflow_p = false;
13737 if (code == GE_EXPR
13738 && (integer_zerop (arg1)
13739 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13740 && real_zerop (arg1)))
13741 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13743 if (strict_overflow_p)
13744 fold_overflow_warning (("assuming signed overflow does not occur "
13745 "when simplifying comparison of "
13746 "absolute value and zero"),
13747 WARN_STRICT_OVERFLOW_CONDITIONAL);
13748 return omit_one_operand_loc (loc, type,
13749 constant_boolean_node (true, type),
13750 arg0);
13753 /* Convert ABS_EXPR<x> < 0 to false. */
13754 strict_overflow_p = false;
13755 if (code == LT_EXPR
13756 && (integer_zerop (arg1) || real_zerop (arg1))
13757 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13759 if (strict_overflow_p)
13760 fold_overflow_warning (("assuming signed overflow does not occur "
13761 "when simplifying comparison of "
13762 "absolute value and zero"),
13763 WARN_STRICT_OVERFLOW_CONDITIONAL);
13764 return omit_one_operand_loc (loc, type,
13765 constant_boolean_node (false, type),
13766 arg0);
13769 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13770 and similarly for >= into !=. */
13771 if ((code == LT_EXPR || code == GE_EXPR)
13772 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13773 && TREE_CODE (arg1) == LSHIFT_EXPR
13774 && integer_onep (TREE_OPERAND (arg1, 0)))
13775 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13776 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13777 TREE_OPERAND (arg1, 1)),
13778 build_zero_cst (TREE_TYPE (arg0)));
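/* Worked example: for unsigned X, X < (1 << Y) folds to
   (X >> Y) == 0; with X == 37 and Y == 5 both sides are false
   (37 >= 32 and 37 >> 5 == 1), while with Y == 6 both are true.  */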
13780 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13781 otherwise Y might be >= # of bits in X's type and thus e.g.
13782 (unsigned char) (1 << Y) for Y == 15 might be 0.
13783 If the cast is widening, then 1 << Y should have unsigned type,
13784 otherwise if Y is number of bits in the signed shift type minus 1,
13785 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13786 Y == 31 might be 0xffffffff80000000. */
13787 if ((code == LT_EXPR || code == GE_EXPR)
13788 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13789 && CONVERT_EXPR_P (arg1)
13790 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13791 && (TYPE_PRECISION (TREE_TYPE (arg1))
13792 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13793 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13794 || (TYPE_PRECISION (TREE_TYPE (arg1))
13795 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13796 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13798 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13799 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13800 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13801 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13802 build_zero_cst (TREE_TYPE (arg0)));
13805 return NULL_TREE;
13807 case UNORDERED_EXPR:
13808 case ORDERED_EXPR:
13809 case UNLT_EXPR:
13810 case UNLE_EXPR:
13811 case UNGT_EXPR:
13812 case UNGE_EXPR:
13813 case UNEQ_EXPR:
13814 case LTGT_EXPR:
13815 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13817 t1 = fold_relational_const (code, type, arg0, arg1);
13818 if (t1 != NULL_TREE)
13819 return t1;
13822 /* If the first operand is NaN, the result is constant. */
13823 if (TREE_CODE (arg0) == REAL_CST
13824 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13825 && (code != LTGT_EXPR || ! flag_trapping_math))
13827 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13828 ? integer_zero_node
13829 : integer_one_node;
13830 return omit_one_operand_loc (loc, type, t1, arg1);
13833 /* If the second operand is NaN, the result is constant. */
13834 if (TREE_CODE (arg1) == REAL_CST
13835 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13836 && (code != LTGT_EXPR || ! flag_trapping_math))
13838 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13839 ? integer_zero_node
13840 : integer_one_node;
13841 return omit_one_operand_loc (loc, type, t1, arg0);
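/* Illustrative example (not part of the original source): with a NaN
   constant operand,

       x UNLT NAN   ->   1     (unordered comparisons are true)
       x LTGT NAN   ->   0     (ordered comparisons are false)

   where the LTGT case is only folded if -ftrapping-math does not
   require the comparison to be kept.  */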
13844 /* Simplify unordered comparison of something with itself. */
13845 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13846 && operand_equal_p (arg0, arg1, 0))
13847 return constant_boolean_node (1, type);
13849 if (code == LTGT_EXPR
13850 && !flag_trapping_math
13851 && operand_equal_p (arg0, arg1, 0))
13852 return constant_boolean_node (0, type);
13854 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13856 tree targ0 = strip_float_extensions (arg0);
13857 tree targ1 = strip_float_extensions (arg1);
13858 tree newtype = TREE_TYPE (targ0);
13860 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13861 newtype = TREE_TYPE (targ1);
13863 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13864 return fold_build2_loc (loc, code, type,
13865 fold_convert_loc (loc, newtype, targ0),
13866 fold_convert_loc (loc, newtype, targ1));
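/* Illustrative example (not part of the original source): if f1 and
   f2 are floats compared after promotion to double,

       (double) f1 UNLT (double) f2   ->   f1 UNLT f2

   since the widening conversion is exact and cannot change the
   outcome of the comparison.  */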
13869 return NULL_TREE;
13871 case COMPOUND_EXPR:
13872 /* When pedantic, a compound expression can be neither an lvalue
13873 nor an integer constant expression. */
13874 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13875 return NULL_TREE;
13876 /* Don't let (0, 0) be a null pointer constant. */
13877 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13878 : fold_convert_loc (loc, type, arg1);
13879 return pedantic_non_lvalue_loc (loc, tem);
13881 case COMPLEX_EXPR:
13882 if ((TREE_CODE (arg0) == REAL_CST
13883 && TREE_CODE (arg1) == REAL_CST)
13884 || (TREE_CODE (arg0) == INTEGER_CST
13885 && TREE_CODE (arg1) == INTEGER_CST))
13886 return build_complex (type, arg0, arg1);
13887 if (TREE_CODE (arg0) == REALPART_EXPR
13888 && TREE_CODE (arg1) == IMAGPART_EXPR
13889 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13890 && operand_equal_p (TREE_OPERAND (arg0, 0),
13891 TREE_OPERAND (arg1, 0), 0))
13892 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13893 TREE_OPERAND (arg1, 0));
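/* Illustrative example (not part of the original source):

       COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>>   ->   z

   reassembling a complex value from its own parts is the identity.  */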
13894 return NULL_TREE;
13896 case ASSERT_EXPR:
13897 /* An ASSERT_EXPR should never be passed to fold_binary. */
13898 gcc_unreachable ();
13900 case VEC_PACK_TRUNC_EXPR:
13901 case VEC_PACK_FIX_TRUNC_EXPR:
13903 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13904 tree *elts;
13906 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13907 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13908 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13909 return NULL_TREE;
13911 elts = XALLOCAVEC (tree, nelts);
13912 if (!vec_cst_ctor_to_array (arg0, elts)
13913 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13914 return NULL_TREE;
13916 for (i = 0; i < nelts; i++)
13918 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13919 ? NOP_EXPR : FIX_TRUNC_EXPR,
13920 TREE_TYPE (type), elts[i]);
13921 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13922 return NULL_TREE;
13925 return build_vector (type, elts);
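/* Illustrative example (not part of the original source): packing the
   V2SI constants { 1, 2 } and { 3, 4 } with VEC_PACK_TRUNC_EXPR into
   V4HI yields { 1, 2, 3, 4 }, each element converted to the narrower
   output element type.  */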
13928 case VEC_WIDEN_MULT_LO_EXPR:
13929 case VEC_WIDEN_MULT_HI_EXPR:
13930 case VEC_WIDEN_MULT_EVEN_EXPR:
13931 case VEC_WIDEN_MULT_ODD_EXPR:
13933 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13934 unsigned int out, ofs, scale;
13935 tree *elts;
13937 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13938 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13939 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13940 return NULL_TREE;
13942 elts = XALLOCAVEC (tree, nelts * 4);
13943 if (!vec_cst_ctor_to_array (arg0, elts)
13944 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13945 return NULL_TREE;
13947 if (code == VEC_WIDEN_MULT_LO_EXPR)
13948 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13949 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13950 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13951 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13952 scale = 1, ofs = 0;
13953 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13954 scale = 1, ofs = 1;
13956 for (out = 0; out < nelts; out++)
13958 unsigned int in1 = (out << scale) + ofs;
13959 unsigned int in2 = in1 + nelts * 2;
13960 tree t1, t2;
13962 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13963 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13965 if (t1 == NULL_TREE || t2 == NULL_TREE)
13966 return NULL_TREE;
13967 elts[out] = const_binop (MULT_EXPR, t1, t2);
13968 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13969 return NULL_TREE;
13972 return build_vector (type, elts);
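/* Illustrative example (not part of the original source): for
   VEC_WIDEN_MULT_EVEN_EXPR on V4HI constants { 1, 2, 3, 4 } and
   { 5, 6, 7, 8 }, scale = 1 and ofs = 0 select the even input
   elements, producing the V2SI vector { 1*5, 3*7 }.  */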
13975 default:
13976 return NULL_TREE;
13977 } /* switch (code) */
13980 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13981 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13982 of GOTO_EXPR. */
13984 static tree
13985 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13987 switch (TREE_CODE (*tp))
13989 case LABEL_EXPR:
13990 return *tp;
13992 case GOTO_EXPR:
13993 *walk_subtrees = 0;
13995 /* ... fall through ... */
13997 default:
13998 return NULL_TREE;
14002 /* Return whether the sub-tree ST contains a label which is accessible from
14003 outside the sub-tree. */
14005 static bool
14006 contains_label_p (tree st)
14008 return
14009 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14012 /* Fold a ternary expression of code CODE and type TYPE with operands
14013 OP0, OP1, and OP2. Return the folded expression if folding is
14014 successful. Otherwise, return NULL_TREE. */
14016 tree
14017 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14018 tree op0, tree op1, tree op2)
14020 tree tem;
14021 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14022 enum tree_code_class kind = TREE_CODE_CLASS (code);
14024 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14025 && TREE_CODE_LENGTH (code) == 3);
14027 /* Strip any conversions that don't change the mode. This is safe
14028 for every expression, except for a comparison expression because
14029 its signedness is derived from its operands. So, in the latter
14030 case, only strip conversions that don't change the signedness.
14032 Note that this is done as an internal manipulation within the
14033 constant folder, in order to find the simplest representation of
14034 the arguments so that their form can be studied. In any case,
14035 the appropriate type conversions should be put back in the tree
14036 that will get out of the constant folder. */
14037 if (op0)
14039 arg0 = op0;
14040 STRIP_NOPS (arg0);
14043 if (op1)
14045 arg1 = op1;
14046 STRIP_NOPS (arg1);
14049 if (op2)
14051 arg2 = op2;
14052 STRIP_NOPS (arg2);
14055 switch (code)
14057 case COMPONENT_REF:
14058 if (TREE_CODE (arg0) == CONSTRUCTOR
14059 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14061 unsigned HOST_WIDE_INT idx;
14062 tree field, value;
14063 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14064 if (field == arg1)
14065 return value;
14067 return NULL_TREE;
14069 case COND_EXPR:
14070 case VEC_COND_EXPR:
14071 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14072 so all simple results must be passed through pedantic_non_lvalue. */
14073 if (TREE_CODE (arg0) == INTEGER_CST)
14075 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14076 tem = integer_zerop (arg0) ? op2 : op1;
14077 /* Only optimize constant conditions when the selected branch
14078 has the same type as the COND_EXPR. This avoids optimizing
14079 away "c ? x : throw", where the throw has a void type.
14080 Avoid throwing away an operand that contains a label. */
14081 if ((!TREE_SIDE_EFFECTS (unused_op)
14082 || !contains_label_p (unused_op))
14083 && (! VOID_TYPE_P (TREE_TYPE (tem))
14084 || VOID_TYPE_P (type)))
14085 return pedantic_non_lvalue_loc (loc, tem);
14086 return NULL_TREE;
14088 else if (TREE_CODE (arg0) == VECTOR_CST)
14090 if (integer_all_onesp (arg0))
14091 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14092 if (integer_zerop (arg0))
14093 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14095 if ((TREE_CODE (arg1) == VECTOR_CST
14096 || TREE_CODE (arg1) == CONSTRUCTOR)
14097 && (TREE_CODE (arg2) == VECTOR_CST
14098 || TREE_CODE (arg2) == CONSTRUCTOR))
14100 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14101 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14102 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14103 for (i = 0; i < nelts; i++)
14105 tree val = VECTOR_CST_ELT (arg0, i);
14106 if (integer_all_onesp (val))
14107 sel[i] = i;
14108 else if (integer_zerop (val))
14109 sel[i] = nelts + i;
14110 else /* Currently unreachable. */
14111 return NULL_TREE;
14113 tree t = fold_vec_perm (type, arg1, arg2, sel);
14114 if (t != NULL_TREE)
14115 return t;
14119 if (operand_equal_p (arg1, op2, 0))
14120 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14122 /* If we have A op B ? A : C, we may be able to convert this to a
14123 simpler expression, depending on the operation and the values
14124 of B and C. Signed zeros prevent all of these transformations,
14125 for reasons given above each one.
14127 Also try swapping the arguments and inverting the conditional. */
14128 if (COMPARISON_CLASS_P (arg0)
14129 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14130 arg1, TREE_OPERAND (arg0, 1))
14131 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14133 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14134 if (tem)
14135 return tem;
14138 if (COMPARISON_CLASS_P (arg0)
14139 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14140 op2,
14141 TREE_OPERAND (arg0, 1))
14142 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14144 location_t loc0 = expr_location_or (arg0, loc);
14145 tem = fold_invert_truthvalue (loc0, arg0);
14146 if (tem && COMPARISON_CLASS_P (tem))
14148 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14149 if (tem)
14150 return tem;
14154 /* If the second operand is simpler than the third, swap them
14155 since that produces better jump optimization results. */
14156 if (truth_value_p (TREE_CODE (arg0))
14157 && tree_swap_operands_p (op1, op2, false))
14159 location_t loc0 = expr_location_or (arg0, loc);
14160 /* See if this can be inverted. If it can't, possibly because
14161 it was a floating-point inequality comparison, don't do
14162 anything. */
14163 tem = fold_invert_truthvalue (loc0, arg0);
14164 if (tem)
14165 return fold_build3_loc (loc, code, type, tem, op2, op1);
14168 /* Convert A ? 1 : 0 to simply A. */
14169 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14170 : (integer_onep (op1)
14171 && !VECTOR_TYPE_P (type)))
14172 && integer_zerop (op2)
14173 /* If we try to convert OP0 to our type, the
14174 call to fold will try to move the conversion inside
14175 a COND, which will recurse. In that case, the COND_EXPR
14176 is probably the best choice, so leave it alone. */
14177 && type == TREE_TYPE (arg0))
14178 return pedantic_non_lvalue_loc (loc, arg0);
14180 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14181 over COND_EXPR in cases such as floating point comparisons. */
14182 if (integer_zerop (op1)
14183 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14184 : (integer_onep (op2)
14185 && !VECTOR_TYPE_P (type)))
14186 && truth_value_p (TREE_CODE (arg0)))
14187 return pedantic_non_lvalue_loc (loc,
14188 fold_convert_loc (loc, type,
14189 invert_truthvalue_loc (loc,
14190 arg0)));
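/* Illustrative example (not part of the original source): for integer
   x and y,

       (x > y) ? 0 : 1   ->   !(x > y)   ->   x <= y

   preferring NOT_EXPR (and the inverted comparison) over a
   conditional.  */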
14192 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14193 if (TREE_CODE (arg0) == LT_EXPR
14194 && integer_zerop (TREE_OPERAND (arg0, 1))
14195 && integer_zerop (op2)
14196 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14198 /* sign_bit_p only checks ARG1 bits within A's precision.
14199 If <sign bit of A> has wider type than A, bits outside
14200 of A's precision in <sign bit of A> need to be checked.
14201 If they are all 0, this optimization needs to be done
14202 in A's unsigned type; if they are all 1, in A's signed type;
14203 otherwise it can't be done. */
14204 if (TYPE_PRECISION (TREE_TYPE (tem))
14205 < TYPE_PRECISION (TREE_TYPE (arg1))
14206 && TYPE_PRECISION (TREE_TYPE (tem))
14207 < TYPE_PRECISION (type))
14209 unsigned HOST_WIDE_INT mask_lo;
14210 HOST_WIDE_INT mask_hi;
14211 int inner_width, outer_width;
14212 tree tem_type;
14214 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14215 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14216 if (outer_width > TYPE_PRECISION (type))
14217 outer_width = TYPE_PRECISION (type);
14219 if (outer_width > HOST_BITS_PER_WIDE_INT)
14221 mask_hi = (HOST_WIDE_INT_M1U
14222 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14223 mask_lo = -1;
14225 else
14227 mask_hi = 0;
14228 mask_lo = (HOST_WIDE_INT_M1U
14229 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14231 if (inner_width > HOST_BITS_PER_WIDE_INT)
14233 mask_hi &= ~(HOST_WIDE_INT_M1U
14234 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14235 mask_lo = 0;
14237 else
14238 mask_lo &= ~(HOST_WIDE_INT_M1U
14239 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14241 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14242 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14244 tem_type = signed_type_for (TREE_TYPE (tem));
14245 tem = fold_convert_loc (loc, tem_type, tem);
14247 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14248 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14250 tem_type = unsigned_type_for (TREE_TYPE (tem));
14251 tem = fold_convert_loc (loc, tem_type, tem);
14253 else
14254 tem = NULL;
14257 if (tem)
14258 return
14259 fold_convert_loc (loc, type,
14260 fold_build2_loc (loc, BIT_AND_EXPR,
14261 TREE_TYPE (tem), tem,
14262 fold_convert_loc (loc,
14263 TREE_TYPE (tem),
14264 arg1)));
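/* Illustrative example (not part of the original source): for a
   32-bit signed int A,

       A < 0 ? 0x80000000 : 0   ->   A & 0x80000000

   because the sign bit of A is set exactly when A < 0.  */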
14267 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14268 already handled above. */
14269 if (TREE_CODE (arg0) == BIT_AND_EXPR
14270 && integer_onep (TREE_OPERAND (arg0, 1))
14271 && integer_zerop (op2)
14272 && integer_pow2p (arg1))
14274 tree tem = TREE_OPERAND (arg0, 0);
14275 STRIP_NOPS (tem);
14276 if (TREE_CODE (tem) == RSHIFT_EXPR
14277 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14278 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14279 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14280 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14281 TREE_OPERAND (tem, 0), arg1);
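/* Illustrative example (not part of the original source):

       ((A >> 4) & 1) ? (1 << 4) : 0   ->   A & (1 << 4)

   testing bit 4 and then rematerializing it is the same as masking
   bit 4 directly.  */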
14284 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14285 is probably obsolete because the first operand should be a
14286 truth value (that's why we have the two cases above), but let's
14287 leave it in until we can confirm this for all front-ends. */
14288 if (integer_zerop (op2)
14289 && TREE_CODE (arg0) == NE_EXPR
14290 && integer_zerop (TREE_OPERAND (arg0, 1))
14291 && integer_pow2p (arg1)
14292 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14293 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14294 arg1, OEP_ONLY_CONST))
14295 return pedantic_non_lvalue_loc (loc,
14296 fold_convert_loc (loc, type,
14297 TREE_OPERAND (arg0, 0)));
14299 /* Disable the transformations below for vectors, since
14300 fold_binary_op_with_conditional_arg may undo them immediately,
14301 yielding an infinite loop. */
14302 if (code == VEC_COND_EXPR)
14303 return NULL_TREE;
14305 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14306 if (integer_zerop (op2)
14307 && truth_value_p (TREE_CODE (arg0))
14308 && truth_value_p (TREE_CODE (arg1))
14309 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14310 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14311 : TRUTH_ANDIF_EXPR,
14312 type, fold_convert_loc (loc, type, arg0), arg1);
14314 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14315 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14316 && truth_value_p (TREE_CODE (arg0))
14317 && truth_value_p (TREE_CODE (arg1))
14318 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14320 location_t loc0 = expr_location_or (arg0, loc);
14321 /* Only perform transformation if ARG0 is easily inverted. */
14322 tem = fold_invert_truthvalue (loc0, arg0);
14323 if (tem)
14324 return fold_build2_loc (loc, code == VEC_COND_EXPR
14325 ? BIT_IOR_EXPR
14326 : TRUTH_ORIF_EXPR,
14327 type, fold_convert_loc (loc, type, tem),
14328 arg1);
14331 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14332 if (integer_zerop (arg1)
14333 && truth_value_p (TREE_CODE (arg0))
14334 && truth_value_p (TREE_CODE (op2))
14335 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14337 location_t loc0 = expr_location_or (arg0, loc);
14338 /* Only perform transformation if ARG0 is easily inverted. */
14339 tem = fold_invert_truthvalue (loc0, arg0);
14340 if (tem)
14341 return fold_build2_loc (loc, code == VEC_COND_EXPR
14342 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14343 type, fold_convert_loc (loc, type, tem),
14344 op2);
14347 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14348 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14349 && truth_value_p (TREE_CODE (arg0))
14350 && truth_value_p (TREE_CODE (op2))
14351 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14352 return fold_build2_loc (loc, code == VEC_COND_EXPR
14353 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14354 type, fold_convert_loc (loc, type, arg0), op2);
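/* Illustrative summary (not part of the original source) of the four
   truth-value conversions above, for boolean a and b:

       a ? b : 0   ->   a && b
       a ? b : 1   ->   !a || b
       a ? 0 : b   ->   !a && b
       a ? 1 : b   ->   a || b

   (bitwise AND/IOR are used instead for VEC_COND_EXPR).  */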
14356 return NULL_TREE;
14358 case CALL_EXPR:
14359 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14360 of fold_ternary on them. */
14361 gcc_unreachable ();
14363 case BIT_FIELD_REF:
14364 if ((TREE_CODE (arg0) == VECTOR_CST
14365 || (TREE_CODE (arg0) == CONSTRUCTOR
14366 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14367 && (type == TREE_TYPE (TREE_TYPE (arg0))
14368 || (TREE_CODE (type) == VECTOR_TYPE
14369 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14371 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14372 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14373 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14374 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14376 if (n != 0
14377 && (idx % width) == 0
14378 && (n % width) == 0
14379 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14381 idx = idx / width;
14382 n = n / width;
14384 if (TREE_CODE (arg0) == VECTOR_CST)
14386 if (n == 1)
14387 return VECTOR_CST_ELT (arg0, idx);
14389 tree *vals = XALLOCAVEC (tree, n);
14390 for (unsigned i = 0; i < n; ++i)
14391 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14392 return build_vector (type, vals);
14395 /* Constructor elements can be subvectors. */
14396 unsigned HOST_WIDE_INT k = 1;
14397 if (CONSTRUCTOR_NELTS (arg0) != 0)
14399 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14400 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14401 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14404 /* We keep an exact subset of the constructor elements. */
14405 if ((idx % k) == 0 && (n % k) == 0)
14407 if (CONSTRUCTOR_NELTS (arg0) == 0)
14408 return build_constructor (type, NULL);
14409 idx /= k;
14410 n /= k;
14411 if (n == 1)
14413 if (idx < CONSTRUCTOR_NELTS (arg0))
14414 return CONSTRUCTOR_ELT (arg0, idx)->value;
14415 return build_zero_cst (type);
14418 vec<constructor_elt, va_gc> *vals;
14419 vec_alloc (vals, n);
14420 for (unsigned i = 0;
14421 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14422 ++i)
14423 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14424 CONSTRUCTOR_ELT
14425 (arg0, idx + i)->value);
14426 return build_constructor (type, vals);
14428 /* The bitfield references a single constructor element. */
14429 else if (idx + n <= (idx / k + 1) * k)
14431 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14432 return build_zero_cst (type);
14433 else if (n == k)
14434 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14435 else
14436 return fold_build3_loc (loc, code, type,
14437 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14438 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14443 /* A bit-field-ref that references the full argument can be stripped. */
14444 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14445 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14446 && integer_zerop (op2))
14447 return fold_convert_loc (loc, type, arg0);
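/* Illustrative example (not part of the original source): for a
   32-bit integer x,

       BIT_FIELD_REF <x, 32, 0>   ->   (type) x

   because the reference covers the full width of the operand.  */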
14449 /* On constants we can use native encode/interpret to constant
14450 fold (nearly) all BIT_FIELD_REFs. */
14451 if (CONSTANT_CLASS_P (arg0)
14452 && can_native_interpret_type_p (type)
14453 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14454 /* This limitation should not be necessary; we just need to
14455 round this up to the mode size. */
14456 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14457 /* Need bit-shifting of the buffer to relax the following. */
14458 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14460 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14461 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14462 unsigned HOST_WIDE_INT clen;
14463 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14464 /* ??? We cannot tell native_encode_expr to start at
14465 an arbitrary byte offset, so limit ourselves to a reasonable
14466 amount of work. */
14467 if (clen <= 4096)
14469 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14470 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14471 if (len > 0
14472 && len * BITS_PER_UNIT >= bitpos + bitsize)
14474 tree v = native_interpret_expr (type,
14475 b + bitpos / BITS_PER_UNIT,
14476 bitsize / BITS_PER_UNIT);
14477 if (v)
14478 return v;
14483 return NULL_TREE;
14485 case FMA_EXPR:
14486 /* For integers we can decompose the FMA if possible. */
14487 if (TREE_CODE (arg0) == INTEGER_CST
14488 && TREE_CODE (arg1) == INTEGER_CST)
14489 return fold_build2_loc (loc, PLUS_EXPR, type,
14490 const_binop (MULT_EXPR, arg0, arg1), arg2);
14491 if (integer_zerop (arg2))
14492 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14494 return fold_fma (loc, type, arg0, arg1, arg2);
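/* Illustrative examples (not part of the original source) of the
   FMA_EXPR folds above, computing a * b + c:

       FMA <2, 3, x>   ->   6 + x     (both multiplicands constant)
       FMA <a, b, 0>   ->   a * b     (zero addend)                  */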
14496 case VEC_PERM_EXPR:
14497 if (TREE_CODE (arg2) == VECTOR_CST)
14499 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14500 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14501 tree t;
14502 bool need_mask_canon = false;
14503 bool all_in_vec0 = true;
14504 bool all_in_vec1 = true;
14505 bool maybe_identity = true;
14506 bool single_arg = (op0 == op1);
14507 bool changed = false;
14509 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14510 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14511 for (i = 0; i < nelts; i++)
14513 tree val = VECTOR_CST_ELT (arg2, i);
14514 if (TREE_CODE (val) != INTEGER_CST)
14515 return NULL_TREE;
14517 sel[i] = TREE_INT_CST_LOW (val) & mask;
14518 if (TREE_INT_CST_HIGH (val)
14519 || ((unsigned HOST_WIDE_INT)
14520 TREE_INT_CST_LOW (val) != sel[i]))
14521 need_mask_canon = true;
14523 if (sel[i] < nelts)
14524 all_in_vec1 = false;
14525 else
14526 all_in_vec0 = false;
14528 if ((sel[i] & (nelts-1)) != i)
14529 maybe_identity = false;
14532 if (maybe_identity)
14534 if (all_in_vec0)
14535 return op0;
14536 if (all_in_vec1)
14537 return op1;
14540 if (all_in_vec0)
14541 op1 = op0;
14542 else if (all_in_vec1)
14544 op0 = op1;
14545 for (i = 0; i < nelts; i++)
14546 sel[i] -= nelts;
14547 need_mask_canon = true;
14550 if ((TREE_CODE (op0) == VECTOR_CST
14551 || TREE_CODE (op0) == CONSTRUCTOR)
14552 && (TREE_CODE (op1) == VECTOR_CST
14553 || TREE_CODE (op1) == CONSTRUCTOR))
14555 t = fold_vec_perm (type, op0, op1, sel);
14556 if (t != NULL_TREE)
14557 return t;
14560 if (op0 == op1 && !single_arg)
14561 changed = true;
14563 if (need_mask_canon && arg2 == op2)
14565 tree *tsel = XALLOCAVEC (tree, nelts);
14566 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14567 for (i = 0; i < nelts; i++)
14568 tsel[i] = build_int_cst (eltype, sel[i]);
14569 op2 = build_vector (TREE_TYPE (arg2), tsel);
14570 changed = true;
14573 if (changed)
14574 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14576 return NULL_TREE;
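/* Illustrative example (not part of the original source): a
   VEC_PERM_EXPR on distinct V4SI operands with selector { 0, 1, 2, 3 }
   is the identity permutation and folds to op0; a selector such as
   { 5, 4, 7, 6 } draws only from the second operand, so the
   permutation is rewritten to use op1 for both inputs with the
   selector reduced to { 1, 0, 3, 2 }.  */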
14578 default:
14579 return NULL_TREE;
14580 } /* switch (code) */
14583 /* Perform constant folding and related simplification of EXPR.
14584 The related simplifications include x*1 => x, x*0 => 0, etc.,
14585 and application of the associative law.
14586 NOP_EXPR conversions may be removed freely (as long as we
14587 are careful not to change the type of the overall expression).
14588 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14589 but we can constant-fold them if they have constant operands. */
14591 #ifdef ENABLE_FOLD_CHECKING
14592 # define fold(x) fold_1 (x)
14593 static tree fold_1 (tree);
14594 static
14595 #endif
14596 tree
14597 fold (tree expr)
14599 const tree t = expr;
14600 enum tree_code code = TREE_CODE (t);
14601 enum tree_code_class kind = TREE_CODE_CLASS (code);
14602 tree tem;
14603 location_t loc = EXPR_LOCATION (expr);
14605 /* Return right away if a constant. */
14606 if (kind == tcc_constant)
14607 return t;
14609 /* CALL_EXPR-like objects with variable numbers of operands are
14610 treated specially. */
14611 if (kind == tcc_vl_exp)
14613 if (code == CALL_EXPR)
14615 tem = fold_call_expr (loc, expr, false);
14616 return tem ? tem : expr;
14618 return expr;
14621 if (IS_EXPR_CODE_CLASS (kind))
14623 tree type = TREE_TYPE (t);
14624 tree op0, op1, op2;
14626 switch (TREE_CODE_LENGTH (code))
14628 case 1:
14629 op0 = TREE_OPERAND (t, 0);
14630 tem = fold_unary_loc (loc, code, type, op0);
14631 return tem ? tem : expr;
14632 case 2:
14633 op0 = TREE_OPERAND (t, 0);
14634 op1 = TREE_OPERAND (t, 1);
14635 tem = fold_binary_loc (loc, code, type, op0, op1);
14636 return tem ? tem : expr;
14637 case 3:
14638 op0 = TREE_OPERAND (t, 0);
14639 op1 = TREE_OPERAND (t, 1);
14640 op2 = TREE_OPERAND (t, 2);
14641 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14642 return tem ? tem : expr;
14643 default:
14644 break;
14648 switch (code)
14650 case ARRAY_REF:
14652 tree op0 = TREE_OPERAND (t, 0);
14653 tree op1 = TREE_OPERAND (t, 1);
14655 if (TREE_CODE (op1) == INTEGER_CST
14656 && TREE_CODE (op0) == CONSTRUCTOR
14657 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14659 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14660 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14661 unsigned HOST_WIDE_INT begin = 0;
14663 /* Find a matching index by means of a binary search. */
14664 while (begin != end)
14666 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14667 tree index = (*elts)[middle].index;
14669 if (TREE_CODE (index) == INTEGER_CST
14670 && tree_int_cst_lt (index, op1))
14671 begin = middle + 1;
14672 else if (TREE_CODE (index) == INTEGER_CST
14673 && tree_int_cst_lt (op1, index))
14674 end = middle;
14675 else if (TREE_CODE (index) == RANGE_EXPR
14676 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14677 begin = middle + 1;
14678 else if (TREE_CODE (index) == RANGE_EXPR
14679 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14680 end = middle;
14681 else
14682 return (*elts)[middle].value;
14686 return t;
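/* Illustrative example (not part of the original source): folding
   "arr[2]" where arr is the CONSTRUCTOR { [0]=10, [1]=20, [2]=30 }
   binary-searches the sorted index list above and returns the
   INTEGER_CST 30.  */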
14689 /* Return a VECTOR_CST if possible. */
14690 case CONSTRUCTOR:
14692 tree type = TREE_TYPE (t);
14693 if (TREE_CODE (type) != VECTOR_TYPE)
14694 return t;
14696 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14697 unsigned HOST_WIDE_INT idx, pos = 0;
14698 tree value;
14700 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14702 if (!CONSTANT_CLASS_P (value))
14703 return t;
14704 if (TREE_CODE (value) == VECTOR_CST)
14706 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14707 vec[pos++] = VECTOR_CST_ELT (value, i);
14709 else
14710 vec[pos++] = value;
14712 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14713 vec[pos] = build_zero_cst (TREE_TYPE (type));
14715 return build_vector (type, vec);
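/* Illustrative example (not part of the original source): a vector
   CONSTRUCTOR { 1, 2 } for a V4SI type becomes the VECTOR_CST
   { 1, 2, 0, 0 }, with the trailing elements zero-filled.  */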
14718 case CONST_DECL:
14719 return fold (DECL_INITIAL (t));
14721 default:
14722 return t;
14723 } /* switch (code) */
14726 #ifdef ENABLE_FOLD_CHECKING
14727 #undef fold
14729 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14730 hash_table <pointer_hash <tree_node> >);
14731 static void fold_check_failed (const_tree, const_tree);
14732 void print_fold_checksum (const_tree);
14734 /* When --enable-checking=fold is in effect, compute a digest of expr
14735 before and after the actual fold call, to check that fold did not
14736 accidentally change the original expr. */
14738 tree
14739 fold (tree expr)
14741 tree ret;
14742 struct md5_ctx ctx;
14743 unsigned char checksum_before[16], checksum_after[16];
14744 hash_table <pointer_hash <tree_node> > ht;
14746 ht.create (32);
14747 md5_init_ctx (&ctx);
14748 fold_checksum_tree (expr, &ctx, ht);
14749 md5_finish_ctx (&ctx, checksum_before);
14750 ht.empty ();
14752 ret = fold_1 (expr);
14754 md5_init_ctx (&ctx);
14755 fold_checksum_tree (expr, &ctx, ht);
14756 md5_finish_ctx (&ctx, checksum_after);
14757 ht.dispose ();
14759 if (memcmp (checksum_before, checksum_after, 16))
14760 fold_check_failed (expr, ret);
14762 return ret;
14765 void
14766 print_fold_checksum (const_tree expr)
14768 struct md5_ctx ctx;
14769 unsigned char checksum[16], cnt;
14770 hash_table <pointer_hash <tree_node> > ht;
14772 ht.create (32);
14773 md5_init_ctx (&ctx);
14774 fold_checksum_tree (expr, &ctx, ht);
14775 md5_finish_ctx (&ctx, checksum);
14776 ht.dispose ();
14777 for (cnt = 0; cnt < 16; ++cnt)
14778 fprintf (stderr, "%02x", checksum[cnt]);
14779 putc ('\n', stderr);
14782 static void
14783 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14785 internal_error ("fold check: original tree changed by fold");
14788 static void
14789 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14790 hash_table <pointer_hash <tree_node> > ht)
14792 tree_node **slot;
14793 enum tree_code code;
14794 union tree_node buf;
14795 int i, len;
14797 recursive_label:
14798 if (expr == NULL)
14799 return;
14800 slot = ht.find_slot (expr, INSERT);
14801 if (*slot != NULL)
14802 return;
14803 *slot = CONST_CAST_TREE (expr);
14804 code = TREE_CODE (expr);
14805 if (TREE_CODE_CLASS (code) == tcc_declaration
14806 && DECL_ASSEMBLER_NAME_SET_P (expr))
14808 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14809 memcpy ((char *) &buf, expr, tree_size (expr));
14810 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14811 expr = (tree) &buf;
14813 else if (TREE_CODE_CLASS (code) == tcc_type
14814 && (TYPE_POINTER_TO (expr)
14815 || TYPE_REFERENCE_TO (expr)
14816 || TYPE_CACHED_VALUES_P (expr)
14817 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14818 || TYPE_NEXT_VARIANT (expr)))
14820 /* Allow these fields to be modified. */
14821 tree tmp;
14822 memcpy ((char *) &buf, expr, tree_size (expr));
14823 expr = tmp = (tree) &buf;
14824 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14825 TYPE_POINTER_TO (tmp) = NULL;
14826 TYPE_REFERENCE_TO (tmp) = NULL;
14827 TYPE_NEXT_VARIANT (tmp) = NULL;
14828 if (TYPE_CACHED_VALUES_P (tmp))
14830 TYPE_CACHED_VALUES_P (tmp) = 0;
14831 TYPE_CACHED_VALUES (tmp) = NULL;
14834 md5_process_bytes (expr, tree_size (expr), ctx);
14835 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14836 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14837 if (TREE_CODE_CLASS (code) != tcc_type
14838 && TREE_CODE_CLASS (code) != tcc_declaration
14839 && code != TREE_LIST
14840 && code != SSA_NAME
14841 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14842 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14843 switch (TREE_CODE_CLASS (code))
14845 case tcc_constant:
14846 switch (code)
14848 case STRING_CST:
14849 md5_process_bytes (TREE_STRING_POINTER (expr),
14850 TREE_STRING_LENGTH (expr), ctx);
14851 break;
14852 case COMPLEX_CST:
14853 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14854 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14855 break;
14856 case VECTOR_CST:
14857 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14858 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14859 break;
14860 default:
14861 break;
14863 break;
14864 case tcc_exceptional:
14865 switch (code)
14867 case TREE_LIST:
14868 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14869 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14870 expr = TREE_CHAIN (expr);
14871 goto recursive_label;
14872 break;
14873 case TREE_VEC:
14874 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14875 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14876 break;
14877 default:
14878 break;
14880 break;
14881 case tcc_expression:
14882 case tcc_reference:
14883 case tcc_comparison:
14884 case tcc_unary:
14885 case tcc_binary:
14886 case tcc_statement:
14887 case tcc_vl_exp:
14888 len = TREE_OPERAND_LENGTH (expr);
14889 for (i = 0; i < len; ++i)
14890 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14891 break;
14892 case tcc_declaration:
14893 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14894 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14895 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14897 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14898 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14899 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14900 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14901 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14903 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14904 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14906 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14908 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14909 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14910 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14912 break;
14913 case tcc_type:
14914 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14915 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14916 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14917 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14918 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14919 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14920 if (INTEGRAL_TYPE_P (expr)
14921 || SCALAR_FLOAT_TYPE_P (expr))
14923 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14924 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14926 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14927 if (TREE_CODE (expr) == RECORD_TYPE
14928 || TREE_CODE (expr) == UNION_TYPE
14929 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14930 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14931 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14932 break;
14933 default:
14934 break;
14938 /* Helper function for outputting the checksum of a tree T. When
14939 debugging with gdb, you can "define mynext" to be "next" followed
14940 by "call debug_fold_checksum (op0)", then just trace down till the
14941 outputs differ. */
14943 DEBUG_FUNCTION void
14944 debug_fold_checksum (const_tree t)
14946 int i;
14947 unsigned char checksum[16];
14948 struct md5_ctx ctx;
14949 hash_table <pointer_hash <tree_node> > ht;
14950 ht.create (32);
14952 md5_init_ctx (&ctx);
14953 fold_checksum_tree (t, &ctx, ht);
14954 md5_finish_ctx (&ctx, checksum);
14955 ht.empty ();
14957 for (i = 0; i < 16; i++)
14958 fprintf (stderr, "%d ", checksum[i]);
14960 fprintf (stderr, "\n");
14963 #endif
14965 /* Fold a unary tree expression with code CODE of type TYPE with an
14966 operand OP0. LOC is the location of the resulting expression.
14967 Return a folded expression if successful. Otherwise, return a tree
14968 expression with code CODE of type TYPE with an operand OP0. */
14970 tree
14971 fold_build1_stat_loc (location_t loc,
14972 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14974 tree tem;
14975 #ifdef ENABLE_FOLD_CHECKING
14976 unsigned char checksum_before[16], checksum_after[16];
14977 struct md5_ctx ctx;
14978 hash_table <pointer_hash <tree_node> > ht;
14980 ht.create (32);
14981 md5_init_ctx (&ctx);
14982 fold_checksum_tree (op0, &ctx, ht);
14983 md5_finish_ctx (&ctx, checksum_before);
14984 ht.empty ();
14985 #endif
14987 tem = fold_unary_loc (loc, code, type, op0);
14988 if (!tem)
14989 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14991 #ifdef ENABLE_FOLD_CHECKING
14992 md5_init_ctx (&ctx);
14993 fold_checksum_tree (op0, &ctx, ht);
14994 md5_finish_ctx (&ctx, checksum_after);
14995 ht.dispose ();
14997 if (memcmp (checksum_before, checksum_after, 16))
14998 fold_check_failed (op0, tem);
14999 #endif
15000 return tem;
15003 /* Fold a binary tree expression with code CODE of type TYPE with
15004 operands OP0 and OP1. LOC is the location of the resulting
15005 expression. Return a folded expression if successful. Otherwise,
15006 return a tree expression with code CODE of type TYPE with operands
15007 OP0 and OP1. */
15009 tree
15010 fold_build2_stat_loc (location_t loc,
15011 enum tree_code code, tree type, tree op0, tree op1
15012 MEM_STAT_DECL)
15014 tree tem;
15015 #ifdef ENABLE_FOLD_CHECKING
15016 unsigned char checksum_before_op0[16],
15017 checksum_before_op1[16],
15018 checksum_after_op0[16],
15019 checksum_after_op1[16];
15020 struct md5_ctx ctx;
15021 hash_table <pointer_hash <tree_node> > ht;
15023 ht.create (32);
15024 md5_init_ctx (&ctx);
15025 fold_checksum_tree (op0, &ctx, ht);
15026 md5_finish_ctx (&ctx, checksum_before_op0);
15027 ht.empty ();
15029 md5_init_ctx (&ctx);
15030 fold_checksum_tree (op1, &ctx, ht);
15031 md5_finish_ctx (&ctx, checksum_before_op1);
15032 ht.empty ();
15033 #endif
15035 tem = fold_binary_loc (loc, code, type, op0, op1);
15036 if (!tem)
15037 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15039 #ifdef ENABLE_FOLD_CHECKING
15040 md5_init_ctx (&ctx);
15041 fold_checksum_tree (op0, &ctx, ht);
15042 md5_finish_ctx (&ctx, checksum_after_op0);
15043 ht.empty ();
15045 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15046 fold_check_failed (op0, tem);
15048 md5_init_ctx (&ctx);
15049 fold_checksum_tree (op1, &ctx, ht);
15050 md5_finish_ctx (&ctx, checksum_after_op1);
15051 ht.dispose ();
15053 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15054 fold_check_failed (op1, tem);
15055 #endif
15056 return tem;
15059 /* Fold a ternary tree expression with code CODE of type TYPE with
15060 operands OP0, OP1, and OP2. Return a folded expression if
15061 successful. Otherwise, return a tree expression with code CODE of
15062 type TYPE with operands OP0, OP1, and OP2. */
15064 tree
15065 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15066 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15068 tree tem;
15069 #ifdef ENABLE_FOLD_CHECKING
15070 unsigned char checksum_before_op0[16],
15071 checksum_before_op1[16],
15072 checksum_before_op2[16],
15073 checksum_after_op0[16],
15074 checksum_after_op1[16],
15075 checksum_after_op2[16];
15076 struct md5_ctx ctx;
15077 hash_table <pointer_hash <tree_node> > ht;
15079 ht.create (32);
15080 md5_init_ctx (&ctx);
15081 fold_checksum_tree (op0, &ctx, ht);
15082 md5_finish_ctx (&ctx, checksum_before_op0);
15083 ht.empty ();
15085 md5_init_ctx (&ctx);
15086 fold_checksum_tree (op1, &ctx, ht);
15087 md5_finish_ctx (&ctx, checksum_before_op1);
15088 ht.empty ();
15090 md5_init_ctx (&ctx);
15091 fold_checksum_tree (op2, &ctx, ht);
15092 md5_finish_ctx (&ctx, checksum_before_op2);
15093 ht.empty ();
15094 #endif
15096 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15097 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15098 if (!tem)
15099 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15101 #ifdef ENABLE_FOLD_CHECKING
15102 md5_init_ctx (&ctx);
15103 fold_checksum_tree (op0, &ctx, ht);
15104 md5_finish_ctx (&ctx, checksum_after_op0);
15105 ht.empty ();
15107 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15108 fold_check_failed (op0, tem);
15110 md5_init_ctx (&ctx);
15111 fold_checksum_tree (op1, &ctx, ht);
15112 md5_finish_ctx (&ctx, checksum_after_op1);
15113 ht.empty ();
15115 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15116 fold_check_failed (op1, tem);
15118 md5_init_ctx (&ctx);
15119 fold_checksum_tree (op2, &ctx, ht);
15120 md5_finish_ctx (&ctx, checksum_after_op2);
15121 ht.dispose ();
15123 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15124 fold_check_failed (op2, tem);
15125 #endif
15126 return tem;
15129 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15130 arguments in ARGARRAY, and a null static chain.
15131 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15132 of type TYPE from the given operands as constructed by build_call_array. */
15134 tree
15135 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15136 int nargs, tree *argarray)
15138 tree tem;
15139 #ifdef ENABLE_FOLD_CHECKING
15140 unsigned char checksum_before_fn[16],
15141 checksum_before_arglist[16],
15142 checksum_after_fn[16],
15143 checksum_after_arglist[16];
15144 struct md5_ctx ctx;
15145 hash_table <pointer_hash <tree_node> > ht;
15146 int i;
15148 ht.create (32);
15149 md5_init_ctx (&ctx);
15150 fold_checksum_tree (fn, &ctx, ht);
15151 md5_finish_ctx (&ctx, checksum_before_fn);
15152 ht.empty ();
15154 md5_init_ctx (&ctx);
15155 for (i = 0; i < nargs; i++)
15156 fold_checksum_tree (argarray[i], &ctx, ht);
15157 md5_finish_ctx (&ctx, checksum_before_arglist);
15158 ht.empty ();
15159 #endif
15161 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15163 #ifdef ENABLE_FOLD_CHECKING
15164 md5_init_ctx (&ctx);
15165 fold_checksum_tree (fn, &ctx, ht);
15166 md5_finish_ctx (&ctx, checksum_after_fn);
15167 ht.empty ();
15169 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15170 fold_check_failed (fn, tem);
15172 md5_init_ctx (&ctx);
15173 for (i = 0; i < nargs; i++)
15174 fold_checksum_tree (argarray[i], &ctx, ht);
15175 md5_finish_ctx (&ctx, checksum_after_arglist);
15176 ht.dispose ();
15178 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15179 fold_check_failed (NULL_TREE, tem);
15180 #endif
15181 return tem;
15184 /* Perform constant folding and related simplification of initializer
15185 expression EXPR. These behave identically to "fold_buildN" but ignore
15186 potential run-time traps and exceptions that fold must preserve. */
15188 #define START_FOLD_INIT \
15189 int saved_signaling_nans = flag_signaling_nans;\
15190 int saved_trapping_math = flag_trapping_math;\
15191 int saved_rounding_math = flag_rounding_math;\
15192 int saved_trapv = flag_trapv;\
15193 int saved_folding_initializer = folding_initializer;\
15194 flag_signaling_nans = 0;\
15195 flag_trapping_math = 0;\
15196 flag_rounding_math = 0;\
15197 flag_trapv = 0;\
15198 folding_initializer = 1;
15200 #define END_FOLD_INIT \
15201 flag_signaling_nans = saved_signaling_nans;\
15202 flag_trapping_math = saved_trapping_math;\
15203 flag_rounding_math = saved_rounding_math;\
15204 flag_trapv = saved_trapv;\
15205 folding_initializer = saved_folding_initializer;
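/* Illustrative note (not part of the original source): each
   fold_*_initializer_loc wrapper below is simply

       START_FOLD_INIT;          (save and clear FP/trapping flags)
       result = fold_build*;     (fold with initializer semantics)
       END_FOLD_INIT;            (restore the saved flags)

   so a constant like 1.0 / 0.0 may fold to +Inf in a static
   initializer even when -ftrapping-math is in effect.  */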
15207 tree
15208 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15209 tree type, tree op)
15211 tree result;
15212 START_FOLD_INIT;
15214 result = fold_build1_loc (loc, code, type, op);
15216 END_FOLD_INIT;
15217 return result;
15220 tree
15221 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15222 tree type, tree op0, tree op1)
15224 tree result;
15225 START_FOLD_INIT;
15227 result = fold_build2_loc (loc, code, type, op0, op1);
15229 END_FOLD_INIT;
15230 return result;
15233 tree
15234 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15235 tree type, tree op0, tree op1, tree op2)
15237 tree result;
15238 START_FOLD_INIT;
15240 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15242 END_FOLD_INIT;
15243 return result;
15246 tree
15247 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15248 int nargs, tree *argarray)
15250 tree result;
15251 START_FOLD_INIT;
15253 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15255 END_FOLD_INIT;
15256 return result;
15259 #undef START_FOLD_INIT
15260 #undef END_FOLD_INIT
15262 /* Determine if the first argument is a multiple of the second argument.
15263 Return 0 if it is not, or if we cannot easily determine it to be.
15265 An example of the sort of thing we care about (at this point; this routine
15266 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15267 fold cases do now) is discovering that
15269 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15271 is a multiple of
15273 SAVE_EXPR (J * 8)
15275 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15277 This code also handles discovering that
15279 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15281 is a multiple of 8 so we don't have to worry about dealing with a
15282 possible remainder.
15284 Note that we *look* inside a SAVE_EXPR only to determine how it was
15285 calculated; it is not safe for fold to do much of anything else with the
15286 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15287 at run time. For example, the latter example above *cannot* be implemented
15288 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15289 evaluation time of the original SAVE_EXPR is not necessarily the same at
15290 the time the new expression is evaluated. The only optimization of this
15291 sort that would be valid is changing
15293 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15295 divided by 8 to
15297 SAVE_EXPR (I) * SAVE_EXPR (J)
15299 (where the same SAVE_EXPR (J) is used in the original and the
15300 transformed version). */
15302 int
15303 multiple_of_p (tree type, const_tree top, const_tree bottom)
15305 if (operand_equal_p (top, bottom, 0))
15306 return 1;
15308 if (TREE_CODE (type) != INTEGER_TYPE)
15309 return 0;
15311 switch (TREE_CODE (top))
15313 case BIT_AND_EXPR:
15314 /* Bitwise and provides a power of two multiple. If the mask is
15315 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15316 if (!integer_pow2p (bottom))
15317 return 0;
15318 /* FALLTHRU */
15320 case MULT_EXPR:
15321 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15322 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15324 case PLUS_EXPR:
15325 case MINUS_EXPR:
15326 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15327 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15329 case LSHIFT_EXPR:
15330 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15332 tree op1, t1;
15334 op1 = TREE_OPERAND (top, 1);
15335 /* const_binop may not detect overflow correctly,
15336 so check for it explicitly here. */
15337 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15338 > TREE_INT_CST_LOW (op1)
15339 && TREE_INT_CST_HIGH (op1) == 0
15340 && 0 != (t1 = fold_convert (type,
15341 const_binop (LSHIFT_EXPR,
15342 size_one_node,
15343 op1)))
15344 && !TREE_OVERFLOW (t1))
15345 return multiple_of_p (type, t1, bottom);
15347 return 0;
15349 case NOP_EXPR:
15350 /* Can't handle conversions from non-integral or wider integral type. */
15351 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15352 || (TYPE_PRECISION (type)
15353 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15354 return 0;
15356 /* ... fall through ... */
15358 case SAVE_EXPR:
15359 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15361 case COND_EXPR:
15362 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15363 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15365 case INTEGER_CST:
15366 if (TREE_CODE (bottom) != INTEGER_CST
15367 || integer_zerop (bottom)
15368 || (TYPE_UNSIGNED (type)
15369 && (tree_int_cst_sgn (top) < 0
15370 || tree_int_cst_sgn (bottom) < 0)))
15371 return 0;
15372 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15373 top, bottom));
15375 default:
15376 return 0;
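/* Illustrative examples (not part of the original source), for
   integer x of the given TYPE:

       multiple_of_p (type, x * 8,  4)   ->   1  (8 is a multiple of 4)
       multiple_of_p (type, x & 24, 8)   ->   1  (mask 24 = 8 + 16)
       multiple_of_p (type, x + 4,  8)   ->   0  (x alone need not be)  */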
15380 /* Return true if an expression with code CODE and type TYPE is known to be non-negative from CODE and TYPE alone. */
15382 static bool
15383 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15385 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15386 && truth_value_p (code))
15387 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
15388 have a signed:1 type (where the values are -1 and 0). */
15389 return true;
15390 return false;
15393 /* Return true if (CODE OP0) is known to be non-negative. If the return
15394 value is based on the assumption that signed overflow is undefined,
15395 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15396 *STRICT_OVERFLOW_P. */
15398 bool
15399 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15400 bool *strict_overflow_p)
15402 if (TYPE_UNSIGNED (type))
15403 return true;
15405 switch (code)
15407 case ABS_EXPR:
15408 /* We can't return 1 if flag_wrapv is set because
15409 ABS_EXPR<INT_MIN> = INT_MIN. */
15410 if (!INTEGRAL_TYPE_P (type))
15411 return true;
15412 if (TYPE_OVERFLOW_UNDEFINED (type))
15414 *strict_overflow_p = true;
15415 return true;
15417 break;
15419 case NON_LVALUE_EXPR:
15420 case FLOAT_EXPR:
15421 case FIX_TRUNC_EXPR:
15422 return tree_expr_nonnegative_warnv_p (op0,
15423 strict_overflow_p);
15425 case NOP_EXPR:
15427 tree inner_type = TREE_TYPE (op0);
15428 tree outer_type = type;
15430 if (TREE_CODE (outer_type) == REAL_TYPE)
15432 if (TREE_CODE (inner_type) == REAL_TYPE)
15433 return tree_expr_nonnegative_warnv_p (op0,
15434 strict_overflow_p);
15435 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15437 if (TYPE_UNSIGNED (inner_type))
15438 return true;
15439 return tree_expr_nonnegative_warnv_p (op0,
15440 strict_overflow_p);
15443 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15445 if (TREE_CODE (inner_type) == REAL_TYPE)
15446 return tree_expr_nonnegative_warnv_p (op0,
15447 strict_overflow_p);
15448 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15449 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15450 && TYPE_UNSIGNED (inner_type);
15453 break;
15455 default:
15456 return tree_simple_nonnegative_warnv_p (code, type);
15459 /* We don't know the sign of `t', so be conservative and return false. */
15460 return false;
15463 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15464 value is based on the assumption that signed overflow is undefined,
15465 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15466 *STRICT_OVERFLOW_P. */
15468 bool
15469 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15470 tree op1, bool *strict_overflow_p)
15472 if (TYPE_UNSIGNED (type))
15473 return true;
15475 switch (code)
15477 case POINTER_PLUS_EXPR:
15478 case PLUS_EXPR:
15479 if (FLOAT_TYPE_P (type))
15480 return (tree_expr_nonnegative_warnv_p (op0,
15481 strict_overflow_p)
15482 && tree_expr_nonnegative_warnv_p (op1,
15483 strict_overflow_p));
15485 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15486 both unsigned and at least 2 bits shorter than the result. */
15487 if (TREE_CODE (type) == INTEGER_TYPE
15488 && TREE_CODE (op0) == NOP_EXPR
15489 && TREE_CODE (op1) == NOP_EXPR)
15491 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15492 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15493 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15494 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15496 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15497 TYPE_PRECISION (inner2)) + 1;
15498 return prec < TYPE_PRECISION (type);
15501 break;
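/* Illustrative example (not part of the original source): if x and y
   are unsigned 8-bit values zero-extended to 32 bits, then x + y is
   at most 255 + 255 = 510 < 2^9, and since 9 < 32 the sum cannot
   reach the sign bit, so it is known non-negative.  */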
15503 case MULT_EXPR:
15504 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15506 /* x * x is always non-negative for floating point x
15507 or when signed overflow is undefined. */
15508 if (operand_equal_p (op0, op1, 0)
15509 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15510 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15512 if (TYPE_OVERFLOW_UNDEFINED (type))
15513 *strict_overflow_p = true;
15514 return true;
15518 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15519 both unsigned and their combined precision is less than the result's. */
15520 if (TREE_CODE (type) == INTEGER_TYPE
15521 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15522 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15524 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15525 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15526 : TREE_TYPE (op0);
15527 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15528 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15529 : TREE_TYPE (op1);
15531 bool unsigned0 = TYPE_UNSIGNED (inner0);
15532 bool unsigned1 = TYPE_UNSIGNED (inner1);
15534 if (TREE_CODE (op0) == INTEGER_CST)
15535 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15537 if (TREE_CODE (op1) == INTEGER_CST)
15538 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15540 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15541 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15543 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15544 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15545 : TYPE_PRECISION (inner0);
15547 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15548 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15549 : TYPE_PRECISION (inner1);
15551 return precision0 + precision1 < TYPE_PRECISION (type);
15554 return false;
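/* Illustrative example (not part of the original source): for the
   widening-multiply check above, unsigned 8-bit x and 16-bit y
   zero-extended to 32 bits give x * y < 2^24, and 8 + 16 = 24 < 32,
   so the product cannot reach the sign bit.  */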
15556 case BIT_AND_EXPR:
15557 case MAX_EXPR:
15558 return (tree_expr_nonnegative_warnv_p (op0,
15559 strict_overflow_p)
15560 || tree_expr_nonnegative_warnv_p (op1,
15561 strict_overflow_p));
15563 case BIT_IOR_EXPR:
15564 case BIT_XOR_EXPR:
15565 case MIN_EXPR:
15566 case RDIV_EXPR:
15567 case TRUNC_DIV_EXPR:
15568 case CEIL_DIV_EXPR:
15569 case FLOOR_DIV_EXPR:
15570 case ROUND_DIV_EXPR:
15571 return (tree_expr_nonnegative_warnv_p (op0,
15572 strict_overflow_p)
15573 && tree_expr_nonnegative_warnv_p (op1,
15574 strict_overflow_p));
15576 case TRUNC_MOD_EXPR:
15577 case CEIL_MOD_EXPR:
15578 case FLOOR_MOD_EXPR:
15579 case ROUND_MOD_EXPR:
15580 return tree_expr_nonnegative_warnv_p (op0,
15581 strict_overflow_p);
15582 default:
15583 return tree_simple_nonnegative_warnv_p (code, type);
15586 /* We don't know the sign of `t', so be conservative and return false. */
15587 return false;
15590 /* Return true if T is known to be non-negative. If the return
15591 value is based on the assumption that signed overflow is undefined,
15592 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15593 *STRICT_OVERFLOW_P. */
15595 bool
15596 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15598 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15599 return true;
15601 switch (TREE_CODE (t))
15603 case INTEGER_CST:
15604 return tree_int_cst_sgn (t) >= 0;
15606 case REAL_CST:
15607 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15609 case FIXED_CST:
15610 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15612 case COND_EXPR:
15613 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15614 strict_overflow_p)
15615 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15616 strict_overflow_p));
15617 default:
15618 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15619 TREE_TYPE (t));
15621 /* We don't know the sign of `t', so be conservative and return false. */
15622 return false;
15625 /* Return true if T is known to be non-negative. If the return
15626 value is based on the assumption that signed overflow is undefined,
15627 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15628 *STRICT_OVERFLOW_P. */
15630 bool
15631 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15632 tree arg0, tree arg1, bool *strict_overflow_p)
15634 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15635 switch (DECL_FUNCTION_CODE (fndecl))
15637 CASE_FLT_FN (BUILT_IN_ACOS):
15638 CASE_FLT_FN (BUILT_IN_ACOSH):
15639 CASE_FLT_FN (BUILT_IN_CABS):
15640 CASE_FLT_FN (BUILT_IN_COSH):
15641 CASE_FLT_FN (BUILT_IN_ERFC):
15642 CASE_FLT_FN (BUILT_IN_EXP):
15643 CASE_FLT_FN (BUILT_IN_EXP10):
15644 CASE_FLT_FN (BUILT_IN_EXP2):
15645 CASE_FLT_FN (BUILT_IN_FABS):
15646 CASE_FLT_FN (BUILT_IN_FDIM):
15647 CASE_FLT_FN (BUILT_IN_HYPOT):
15648 CASE_FLT_FN (BUILT_IN_POW10):
15649 CASE_INT_FN (BUILT_IN_FFS):
15650 CASE_INT_FN (BUILT_IN_PARITY):
15651 CASE_INT_FN (BUILT_IN_POPCOUNT):
15652 CASE_INT_FN (BUILT_IN_CLZ):
15653 CASE_INT_FN (BUILT_IN_CLRSB):
15654 case BUILT_IN_BSWAP32:
15655 case BUILT_IN_BSWAP64:
15656 /* Always true. */
15657 return true;
15659 CASE_FLT_FN (BUILT_IN_SQRT):
15660 /* sqrt(-0.0) is -0.0. */
15661 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15662 return true;
15663 return tree_expr_nonnegative_warnv_p (arg0,
15664 strict_overflow_p);
15666 CASE_FLT_FN (BUILT_IN_ASINH):
15667 CASE_FLT_FN (BUILT_IN_ATAN):
15668 CASE_FLT_FN (BUILT_IN_ATANH):
15669 CASE_FLT_FN (BUILT_IN_CBRT):
15670 CASE_FLT_FN (BUILT_IN_CEIL):
15671 CASE_FLT_FN (BUILT_IN_ERF):
15672 CASE_FLT_FN (BUILT_IN_EXPM1):
15673 CASE_FLT_FN (BUILT_IN_FLOOR):
15674 CASE_FLT_FN (BUILT_IN_FMOD):
15675 CASE_FLT_FN (BUILT_IN_FREXP):
15676 CASE_FLT_FN (BUILT_IN_ICEIL):
15677 CASE_FLT_FN (BUILT_IN_IFLOOR):
15678 CASE_FLT_FN (BUILT_IN_IRINT):
15679 CASE_FLT_FN (BUILT_IN_IROUND):
15680 CASE_FLT_FN (BUILT_IN_LCEIL):
15681 CASE_FLT_FN (BUILT_IN_LDEXP):
15682 CASE_FLT_FN (BUILT_IN_LFLOOR):
15683 CASE_FLT_FN (BUILT_IN_LLCEIL):
15684 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15685 CASE_FLT_FN (BUILT_IN_LLRINT):
15686 CASE_FLT_FN (BUILT_IN_LLROUND):
15687 CASE_FLT_FN (BUILT_IN_LRINT):
15688 CASE_FLT_FN (BUILT_IN_LROUND):
15689 CASE_FLT_FN (BUILT_IN_MODF):
15690 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15691 CASE_FLT_FN (BUILT_IN_RINT):
15692 CASE_FLT_FN (BUILT_IN_ROUND):
15693 CASE_FLT_FN (BUILT_IN_SCALB):
15694 CASE_FLT_FN (BUILT_IN_SCALBLN):
15695 CASE_FLT_FN (BUILT_IN_SCALBN):
15696 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15697 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15698 CASE_FLT_FN (BUILT_IN_SINH):
15699 CASE_FLT_FN (BUILT_IN_TANH):
15700 CASE_FLT_FN (BUILT_IN_TRUNC):
15701 /* True if the 1st argument is nonnegative. */
15702 return tree_expr_nonnegative_warnv_p (arg0,
15703 strict_overflow_p);
15705 CASE_FLT_FN (BUILT_IN_FMAX):
15706 /* True if the 1st or the 2nd argument is nonnegative. */
15707 return (tree_expr_nonnegative_warnv_p (arg0,
15708 strict_overflow_p)
15709 || (tree_expr_nonnegative_warnv_p (arg1,
15710 strict_overflow_p)));
15712 CASE_FLT_FN (BUILT_IN_FMIN):
15713 /* True if both the 1st and the 2nd arguments are nonnegative. */
15714 return (tree_expr_nonnegative_warnv_p (arg0,
15715 strict_overflow_p)
15716 && (tree_expr_nonnegative_warnv_p (arg1,
15717 strict_overflow_p)));
15719 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15720 /* True if the 2nd argument is nonnegative. */
15721 return tree_expr_nonnegative_warnv_p (arg1,
15722 strict_overflow_p);
15724 CASE_FLT_FN (BUILT_IN_POWI):
15725 /* True if the 1st argument is nonnegative or the second
15726 argument is an even integer. */
15727 if (TREE_CODE (arg1) == INTEGER_CST
15728 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15729 return true;
15730 return tree_expr_nonnegative_warnv_p (arg0,
15731 strict_overflow_p);
15733 CASE_FLT_FN (BUILT_IN_POW):
15734 /* True if the 1st argument is nonnegative or the second
15735 argument is an even-integer-valued real. */
15736 if (TREE_CODE (arg1) == REAL_CST)
15738 REAL_VALUE_TYPE c;
15739 HOST_WIDE_INT n;
15741 c = TREE_REAL_CST (arg1);
15742 n = real_to_integer (&c);
15743 if ((n & 1) == 0)
15745 REAL_VALUE_TYPE cint;
15746 real_from_integer (&cint, VOIDmode, n,
15747 n < 0 ? -1 : 0, 0);
15748 if (real_identical (&c, &cint))
15749 return true;
15752 return tree_expr_nonnegative_warnv_p (arg0,
15753 strict_overflow_p);
15755 default:
15756 break;
15758 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15759 type);
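/* Illustrative sketch (editorial, not part of GCC): the BUILT_IN_POW
   case above accepts an even integer-valued exponent because
   x**(2k) == (x**k)**2 is nonnegative for every real x.  A standalone
   check of the fact (exact for these small operands with a reasonable
   libm):  */
#if 0
#include <assert.h>
#include <math.h>

static void
example_pow_even_exponent (void)
{
  assert (pow (-2.0, 2.0) >= 0.0);   /* (-2)**2 == 4 */
  assert (pow (-3.0, 4.0) >= 0.0);   /* (-3)**4 == 81 */
}
#endif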
15762 /* Return true if T is known to be non-negative. If the return
15763 value is based on the assumption that signed overflow is undefined,
15764 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15765 *STRICT_OVERFLOW_P. */
15767 bool
15768 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15770 enum tree_code code = TREE_CODE (t);
15771 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15772 return true;
15774 switch (code)
15776 case TARGET_EXPR:
15778 tree temp = TARGET_EXPR_SLOT (t);
15779 t = TARGET_EXPR_INITIAL (t);
15781 /* If the initializer is non-void, then it's a normal expression
15782 that will be assigned to the slot. */
15783 if (!VOID_TYPE_P (t))
15784 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15786 /* Otherwise, the initializer sets the slot in some way. One common
15787 way is an assignment statement at the end of the initializer. */
15788 while (1)
15790 if (TREE_CODE (t) == BIND_EXPR)
15791 t = expr_last (BIND_EXPR_BODY (t));
15792 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15793 || TREE_CODE (t) == TRY_CATCH_EXPR)
15794 t = expr_last (TREE_OPERAND (t, 0));
15795 else if (TREE_CODE (t) == STATEMENT_LIST)
15796 t = expr_last (t);
15797 else
15798 break;
15800 if (TREE_CODE (t) == MODIFY_EXPR
15801 && TREE_OPERAND (t, 0) == temp)
15802 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15803 strict_overflow_p);
15805 return false;
15808 case CALL_EXPR:
15810 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15811 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15813 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15814 get_callee_fndecl (t),
15815 arg0,
15816 arg1,
15817 strict_overflow_p);
15819 case COMPOUND_EXPR:
15820 case MODIFY_EXPR:
15821 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15822 strict_overflow_p);
15823 case BIND_EXPR:
15824 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15825 strict_overflow_p);
15826 case SAVE_EXPR:
15827 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15828 strict_overflow_p);
15830 default:
15831 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15832 TREE_TYPE (t));
15835 /* We don't know the sign of `t', so be conservative and return false. */
15836 return false;
15839 /* Return true if T is known to be non-negative. If the return
15840 value is based on the assumption that signed overflow is undefined,
15841 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15842 *STRICT_OVERFLOW_P. */
15844 bool
15845 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15847 enum tree_code code;
15848 if (t == error_mark_node)
15849 return false;
15851 code = TREE_CODE (t);
15852 switch (TREE_CODE_CLASS (code))
15854 case tcc_binary:
15855 case tcc_comparison:
15856 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15857 TREE_TYPE (t),
15858 TREE_OPERAND (t, 0),
15859 TREE_OPERAND (t, 1),
15860 strict_overflow_p);
15862 case tcc_unary:
15863 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15864 TREE_TYPE (t),
15865 TREE_OPERAND (t, 0),
15866 strict_overflow_p);
15868 case tcc_constant:
15869 case tcc_declaration:
15870 case tcc_reference:
15871 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15873 default:
15874 break;
15877 switch (code)
15879 case TRUTH_AND_EXPR:
15880 case TRUTH_OR_EXPR:
15881 case TRUTH_XOR_EXPR:
15882 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15883 TREE_TYPE (t),
15884 TREE_OPERAND (t, 0),
15885 TREE_OPERAND (t, 1),
15886 strict_overflow_p);
15887 case TRUTH_NOT_EXPR:
15888 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15889 TREE_TYPE (t),
15890 TREE_OPERAND (t, 0),
15891 strict_overflow_p);
15893 case COND_EXPR:
15894 case CONSTRUCTOR:
15895 case OBJ_TYPE_REF:
15896 case ASSERT_EXPR:
15897 case ADDR_EXPR:
15898 case WITH_SIZE_EXPR:
15899 case SSA_NAME:
15900 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15902 default:
15903 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15907 /* Return true if `t' is known to be non-negative. Handle warnings
15908 about undefined signed overflow. */
15910 bool
15911 tree_expr_nonnegative_p (tree t)
15913 bool ret, strict_overflow_p;
15915 strict_overflow_p = false;
15916 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15917 if (strict_overflow_p)
15918 fold_overflow_warning (("assuming signed overflow does not occur when "
15919 "determining that expression is always "
15920 "non-negative"),
15921 WARN_STRICT_OVERFLOW_MISC);
15922 return ret;
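/* Illustrative sketch (editorial, not part of GCC proper): how a caller
   might query the predicate defined above.  build_int_cst and
   integer_type_node are the usual tree.h facilities.  */
#if 0
static void
example_query_nonnegative (void)
{
  tree cst = build_int_cst (integer_type_node, 42);
  /* An INTEGER_CST with a nonnegative value satisfies the predicate
     without relying on any signed-overflow assumption, so no
     -Wstrict-overflow warning is emitted.  */
  gcc_assert (tree_expr_nonnegative_p (cst));
}
#endif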
15926 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15927 For floating point we further ensure that T is not denormal.
15928 Similar logic is present in nonzero_address in rtlanal.c.
15930 If the return value is based on the assumption that signed overflow
15931 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15932 change *STRICT_OVERFLOW_P. */
15934 bool
15935 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15936 bool *strict_overflow_p)
15938 switch (code)
15940 case ABS_EXPR:
15941 return tree_expr_nonzero_warnv_p (op0,
15942 strict_overflow_p);
15944 case NOP_EXPR:
15946 tree inner_type = TREE_TYPE (op0);
15947 tree outer_type = type;
15949 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15950 && tree_expr_nonzero_warnv_p (op0,
15951 strict_overflow_p));
15953 break;
15955 case NON_LVALUE_EXPR:
15956 return tree_expr_nonzero_warnv_p (op0,
15957 strict_overflow_p);
15959 default:
15960 break;
15963 return false;
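/* Illustrative sketch (editorial, not part of GCC): the NOP_EXPR case
   above requires the outer precision to be at least the inner one
   because a narrowing conversion can turn a nonzero value into zero:  */
#if 0
#include <assert.h>

static void
example_narrowing_loses_nonzero (void)
{
  int nonzero = 0x100;                             /* 256: clearly nonzero */
  unsigned char narrowed = (unsigned char) nonzero;
  assert (narrowed == 0);                          /* low 8 bits are zero */
}
#endif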
15966 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15967 For floating point we further ensure that T is not denormal.
15968 Similar logic is present in nonzero_address in rtlanal.c.
15970 If the return value is based on the assumption that signed overflow
15971 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15972 change *STRICT_OVERFLOW_P. */
15974 bool
15975 tree_binary_nonzero_warnv_p (enum tree_code code,
15976 tree type,
15977 tree op0,
15978 tree op1, bool *strict_overflow_p)
15980 bool sub_strict_overflow_p;
15981 switch (code)
15983 case POINTER_PLUS_EXPR:
15984 case PLUS_EXPR:
15985 if (TYPE_OVERFLOW_UNDEFINED (type))
15987 /* In the presence of negative values it is hard
15988 to say anything definite. */
15989 sub_strict_overflow_p = false;
15990 if (!tree_expr_nonnegative_warnv_p (op0,
15991 &sub_strict_overflow_p)
15992 || !tree_expr_nonnegative_warnv_p (op1,
15993 &sub_strict_overflow_p))
15994 return false;
15995 /* One of the operands must be positive and the other non-negative. */
15996 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15997 overflows, on a two's-complement machine the sum of two
15998 nonnegative numbers can never be zero. */
15999 return (tree_expr_nonzero_warnv_p (op0,
16000 strict_overflow_p)
16001 || tree_expr_nonzero_warnv_p (op1,
16002 strict_overflow_p));
16004 break;
16006 case MULT_EXPR:
16007 if (TYPE_OVERFLOW_UNDEFINED (type))
16009 if (tree_expr_nonzero_warnv_p (op0,
16010 strict_overflow_p)
16011 && tree_expr_nonzero_warnv_p (op1,
16012 strict_overflow_p))
16014 *strict_overflow_p = true;
16015 return true;
16018 break;
16020 case MIN_EXPR:
16021 sub_strict_overflow_p = false;
16022 if (tree_expr_nonzero_warnv_p (op0,
16023 &sub_strict_overflow_p)
16024 && tree_expr_nonzero_warnv_p (op1,
16025 &sub_strict_overflow_p))
16027 if (sub_strict_overflow_p)
16028 *strict_overflow_p = true;
16030 break;
16032 case MAX_EXPR:
16033 sub_strict_overflow_p = false;
16034 if (tree_expr_nonzero_warnv_p (op0,
16035 &sub_strict_overflow_p))
16037 if (sub_strict_overflow_p)
16038 *strict_overflow_p = true;
16040 /* When both operands are nonzero, then MAX must be too. */
16041 if (tree_expr_nonzero_warnv_p (op1,
16042 strict_overflow_p))
16043 return true;
16045 /* MAX where operand 0 is positive is positive. */
16046 return tree_expr_nonnegative_warnv_p (op0,
16047 strict_overflow_p);
16049 /* MAX where operand 1 is positive is positive. */
16050 else if (tree_expr_nonzero_warnv_p (op1,
16051 &sub_strict_overflow_p)
16052 && tree_expr_nonnegative_warnv_p (op1,
16053 &sub_strict_overflow_p))
16055 if (sub_strict_overflow_p)
16056 *strict_overflow_p = true;
16057 return true;
16059 break;
16061 case BIT_IOR_EXPR:
16062 return (tree_expr_nonzero_warnv_p (op1,
16063 strict_overflow_p)
16064 || tree_expr_nonzero_warnv_p (op0,
16065 strict_overflow_p));
16067 default:
16068 break;
16071 return false;
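/* Illustrative sketch (editorial, not part of GCC): the PLUS_EXPR
   reasoning above.  With N-bit wrapping (two's-complement) arithmetic,
   a nonnegative value plus a positive value has a mathematical sum in
   [1, 2**N - 2], so it can never wrap to zero.  Demonstrated with
   unsigned arithmetic, which C defines to wrap:  */
#if 0
#include <assert.h>
#include <limits.h>

static void
example_wrapping_sum_nonzero (void)
{
  unsigned a = (unsigned) INT_MAX;   /* nonnegative as a signed value */
  unsigned b = 1u;                   /* positive */
  assert (a + b != 0u);              /* overflows INT_MAX, still nonzero */
}
#endif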
16074 /* Return true when T is an address and is known to be nonzero.
16075 For floating point we further ensure that T is not denormal.
16076 Similar logic is present in nonzero_address in rtlanal.c.
16078 If the return value is based on the assumption that signed overflow
16079 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16080 change *STRICT_OVERFLOW_P. */
16082 bool
16083 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16085 bool sub_strict_overflow_p;
16086 switch (TREE_CODE (t))
16088 case INTEGER_CST:
16089 return !integer_zerop (t);
16091 case ADDR_EXPR:
16093 tree base = TREE_OPERAND (t, 0);
16094 if (!DECL_P (base))
16095 base = get_base_address (base);
16097 if (!base)
16098 return false;
16100 /* Weak declarations may link to NULL.  Other declarations may also
16101 end up at address zero, so only assume a nonzero address under
16102 -fdelete-null-pointer-checks; variables allocated on the stack,
16103 however, are always at a nonzero address. */
16103 if (DECL_P (base)
16104 && (flag_delete_null_pointer_checks
16105 || (DECL_CONTEXT (base)
16106 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16107 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16108 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16110 /* Constants are never weak. */
16111 if (CONSTANT_CLASS_P (base))
16112 return true;
16114 return false;
16117 case COND_EXPR:
16118 sub_strict_overflow_p = false;
16119 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16120 &sub_strict_overflow_p)
16121 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16122 &sub_strict_overflow_p))
16124 if (sub_strict_overflow_p)
16125 *strict_overflow_p = true;
16126 return true;
16128 break;
16130 default:
16131 break;
16133 return false;
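/* Illustrative sketch (editorial, not part of GCC): why DECL_WEAK blocks
   the nonzero conclusion above.  With GCC's weak attribute an undefined
   weak symbol resolves to address zero, so its address must not be
   folded to "nonzero":  */
#if 0
extern int maybe_absent __attribute__ ((weak));

static int
example_weak_address_may_be_null (void)
{
  /* The linker may legitimately make this true; the folder therefore
     cannot assume &maybe_absent != 0.  */
  return &maybe_absent == 0;
}
#endif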
16136 /* Return true when T is an address and is known to be nonzero.
16137 For floating point we further ensure that T is not denormal.
16138 Similar logic is present in nonzero_address in rtlanal.c.
16140 If the return value is based on the assumption that signed overflow
16141 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16142 change *STRICT_OVERFLOW_P. */
16144 bool
16145 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16147 tree type = TREE_TYPE (t);
16148 enum tree_code code;
16150 /* Doing something useful for floating point would need more work. */
16151 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16152 return false;
16154 code = TREE_CODE (t);
16155 switch (TREE_CODE_CLASS (code))
16157 case tcc_unary:
16158 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16159 strict_overflow_p);
16160 case tcc_binary:
16161 case tcc_comparison:
16162 return tree_binary_nonzero_warnv_p (code, type,
16163 TREE_OPERAND (t, 0),
16164 TREE_OPERAND (t, 1),
16165 strict_overflow_p);
16166 case tcc_constant:
16167 case tcc_declaration:
16168 case tcc_reference:
16169 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16171 default:
16172 break;
16175 switch (code)
16177 case TRUTH_NOT_EXPR:
16178 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16179 strict_overflow_p);
16181 case TRUTH_AND_EXPR:
16182 case TRUTH_OR_EXPR:
16183 case TRUTH_XOR_EXPR:
16184 return tree_binary_nonzero_warnv_p (code, type,
16185 TREE_OPERAND (t, 0),
16186 TREE_OPERAND (t, 1),
16187 strict_overflow_p);
16189 case COND_EXPR:
16190 case CONSTRUCTOR:
16191 case OBJ_TYPE_REF:
16192 case ASSERT_EXPR:
16193 case ADDR_EXPR:
16194 case WITH_SIZE_EXPR:
16195 case SSA_NAME:
16196 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16198 case COMPOUND_EXPR:
16199 case MODIFY_EXPR:
16200 case BIND_EXPR:
16201 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16202 strict_overflow_p);
16204 case SAVE_EXPR:
16205 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16206 strict_overflow_p);
16208 case CALL_EXPR:
16209 return alloca_call_p (t);
16211 default:
16212 break;
16214 return false;
16217 /* Return true when T is an address and is known to be nonzero.
16218 Handle warnings about undefined signed overflow. */
16220 bool
16221 tree_expr_nonzero_p (tree t)
16223 bool ret, strict_overflow_p;
16225 strict_overflow_p = false;
16226 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16227 if (strict_overflow_p)
16228 fold_overflow_warning (("assuming signed overflow does not occur when "
16229 "determining that expression is always "
16230 "non-zero"),
16231 WARN_STRICT_OVERFLOW_MISC);
16232 return ret;
16235 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16236 attempt to fold the expression to a constant without modifying TYPE,
16237 OP0 or OP1.
16239 If the expression can be simplified to a constant, return the
16240 constant; otherwise return NULL_TREE. */
16243 tree
16244 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16246 tree tem = fold_binary (code, type, op0, op1);
16247 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16250 /* Given the components of a unary expression CODE, TYPE and OP0,
16251 attempt to fold the expression to a constant without modifying
16252 TYPE or OP0.
16254 If the expression can be simplified to a constant, return the
16255 constant; otherwise return NULL_TREE. */
16258 tree
16259 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16261 tree tem = fold_unary (code, type, op0);
16262 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
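/* Illustrative sketch (editorial, not part of GCC proper): folding
   2 + 3 down to the INTEGER_CST 5 with fold_binary_to_constant.  Uses
   the standard build_int_cst / integer_type_node facilities.  */
#if 0
static tree
example_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields the INTEGER_CST 5; NULL_TREE would mean "not a constant".  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}
#endif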
16265 /* If EXP represents referencing an element in a constant string
16266 (either via pointer arithmetic or array indexing), return the
16267 tree representing the value accessed, otherwise return NULL. */
16269 tree
16270 fold_read_from_constant_string (tree exp)
16272 if ((TREE_CODE (exp) == INDIRECT_REF
16273 || TREE_CODE (exp) == ARRAY_REF)
16274 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16276 tree exp1 = TREE_OPERAND (exp, 0);
16277 tree index;
16278 tree string;
16279 location_t loc = EXPR_LOCATION (exp);
16281 if (TREE_CODE (exp) == INDIRECT_REF)
16282 string = string_constant (exp1, &index);
16283 else
16285 tree low_bound = array_ref_low_bound (exp);
16286 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16288 /* Optimize the special case of a zero lower bound.
16290 We convert the low_bound to sizetype to avoid some problems
16291 with constant folding.  (E.g. suppose the lower bound is 1,
16292 and its mode is QI.  Without the conversion, (ARRAY
16293 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16294 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!) */
16295 if (! integer_zerop (low_bound))
16296 index = size_diffop_loc (loc, index,
16297 fold_convert_loc (loc, sizetype, low_bound));
16299 string = exp1;
16302 if (string
16303 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16304 && TREE_CODE (string) == STRING_CST
16305 && TREE_CODE (index) == INTEGER_CST
16306 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16307 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16308 == MODE_INT)
16309 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16310 return build_int_cst_type (TREE_TYPE (exp),
16311 (TREE_STRING_POINTER (string)
16312 [TREE_INT_CST_LOW (index)]));
16314 return NULL;
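/* Illustrative sketch (editorial, not part of GCC): the two source
   forms the function above recognizes.  Both accesses denote the
   constant character 'e' and can be folded at compile time:  */
#if 0
static int
example_read_from_string_constant (void)
{
  char a = "hello"[1];        /* ARRAY_REF form */
  char b = *("hello" + 1);    /* INDIRECT_REF form */
  return a == 'e' && b == 'e';
}
#endif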
16317 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16318 an integer constant, real, or fixed-point constant.
16320 TYPE is the type of the result. */
16322 static tree
16323 fold_negate_const (tree arg0, tree type)
16325 tree t = NULL_TREE;
16327 switch (TREE_CODE (arg0))
16329 case INTEGER_CST:
16331 double_int val = tree_to_double_int (arg0);
16332 bool overflow;
16333 val = val.neg_with_overflow (&overflow);
16334 t = force_fit_type_double (type, val, 1,
16335 (overflow | TREE_OVERFLOW (arg0))
16336 && !TYPE_UNSIGNED (type));
16337 break;
16340 case REAL_CST:
16341 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16342 break;
16344 case FIXED_CST:
16346 FIXED_VALUE_TYPE f;
16347 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16348 &(TREE_FIXED_CST (arg0)), NULL,
16349 TYPE_SATURATING (type));
16350 t = build_fixed (type, f);
16351 /* Propagate overflow flags. */
16352 if (overflow_p | TREE_OVERFLOW (arg0))
16353 TREE_OVERFLOW (t) = 1;
16354 break;
16357 default:
16358 gcc_unreachable ();
16361 return t;
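/* Illustrative sketch (editorial, not part of GCC): the overflow the
   INTEGER_CST case above must record.  In two's complement INT_MIN has
   no positive counterpart, so negating it wraps back to itself; the
   example computes with unsigned (wrapping) arithmetic to stay clear of
   signed-overflow undefined behaviour:  */
#if 0
#include <assert.h>
#include <limits.h>

static void
example_negate_overflow (void)
{
  unsigned wrapped = - (unsigned) INT_MIN;
  assert (wrapped == (unsigned) INT_MIN);   /* -INT_MIN overflowed */
}
#endif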
16364 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16365 an integer constant or real constant.
16367 TYPE is the type of the result. */
16369 tree
16370 fold_abs_const (tree arg0, tree type)
16372 tree t = NULL_TREE;
16374 switch (TREE_CODE (arg0))
16376 case INTEGER_CST:
16378 double_int val = tree_to_double_int (arg0);
16380 /* If the value is unsigned or non-negative, then the absolute value
16381 is the same as the ordinary value. */
16382 if (TYPE_UNSIGNED (type)
16383 || !val.is_negative ())
16384 t = arg0;
16386 /* If the value is negative, then the absolute value is
16387 its negation. */
16388 else
16390 bool overflow;
16391 val = val.neg_with_overflow (&overflow);
16392 t = force_fit_type_double (type, val, -1,
16393 overflow | TREE_OVERFLOW (arg0));
16396 break;
16398 case REAL_CST:
16399 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16400 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16401 else
16402 t = arg0;
16403 break;
16405 default:
16406 gcc_unreachable ();
16409 return t;
16412 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16413 constant. TYPE is the type of the result. */
16415 static tree
16416 fold_not_const (const_tree arg0, tree type)
16418 double_int val;
16420 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16422 val = ~tree_to_double_int (arg0);
16423 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16426 /* Given CODE, a relational operator, the target type TYPE, and two
16427 constant operands OP0 and OP1, return the result of the
16428 relational operation.  If the result is not a compile-time
16429 constant, return NULL_TREE. */
16431 static tree
16432 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16434 int result, invert;
16436 /* From here on, the only cases we handle are when the result is
16437 known to be a constant. */
16439 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16441 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16442 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16444 /* Handle the cases where either operand is a NaN. */
16445 if (real_isnan (c0) || real_isnan (c1))
16447 switch (code)
16449 case EQ_EXPR:
16450 case ORDERED_EXPR:
16451 result = 0;
16452 break;
16454 case NE_EXPR:
16455 case UNORDERED_EXPR:
16456 case UNLT_EXPR:
16457 case UNLE_EXPR:
16458 case UNGT_EXPR:
16459 case UNGE_EXPR:
16460 case UNEQ_EXPR:
16461 result = 1;
16462 break;
16464 case LT_EXPR:
16465 case LE_EXPR:
16466 case GT_EXPR:
16467 case GE_EXPR:
16468 case LTGT_EXPR:
16469 if (flag_trapping_math)
16470 return NULL_TREE;
16471 result = 0;
16472 break;
16474 default:
16475 gcc_unreachable ();
16478 return constant_boolean_node (result, type);
16481 return constant_boolean_node (real_compare (code, c0, c1), type);
16484 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16486 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16487 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16488 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16491 /* Handle equality/inequality of complex constants. */
16492 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16494 tree rcond = fold_relational_const (code, type,
16495 TREE_REALPART (op0),
16496 TREE_REALPART (op1));
16497 tree icond = fold_relational_const (code, type,
16498 TREE_IMAGPART (op0),
16499 TREE_IMAGPART (op1));
16500 if (code == EQ_EXPR)
16501 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16502 else if (code == NE_EXPR)
16503 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16504 else
16505 return NULL_TREE;
16508 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16510 unsigned count = VECTOR_CST_NELTS (op0);
16511 tree *elts = XALLOCAVEC (tree, count);
16512 gcc_assert (VECTOR_CST_NELTS (op1) == count
16513 && TYPE_VECTOR_SUBPARTS (type) == count);
16515 for (unsigned i = 0; i < count; i++)
16517 tree elem_type = TREE_TYPE (type);
16518 tree elem0 = VECTOR_CST_ELT (op0, i);
16519 tree elem1 = VECTOR_CST_ELT (op1, i);
16521 tree tem = fold_relational_const (code, elem_type,
16522 elem0, elem1);
16524 if (tem == NULL_TREE)
16525 return NULL_TREE;
16527 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16530 return build_vector (type, elts);
16533 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16535 To compute GT, swap the arguments and do LT.
16536 To compute GE, do LT and invert the result.
16537 To compute LE, swap the arguments, do LT and invert the result.
16538 To compute NE, do EQ and invert the result.
16540 Therefore, the code below must handle only EQ and LT. */
16542 if (code == LE_EXPR || code == GT_EXPR)
16544 tree tem = op0;
16545 op0 = op1;
16546 op1 = tem;
16547 code = swap_tree_comparison (code);
16550 /* Note that it is safe to invert for real values here because we
16551 have already handled the one case where it matters. */
16553 invert = 0;
16554 if (code == NE_EXPR || code == GE_EXPR)
16556 invert = 1;
16557 code = invert_tree_comparison (code, false);
16560 /* Compute a result for LT or EQ if the arguments permit;
16561 otherwise return NULL_TREE. */
16562 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16564 if (code == EQ_EXPR)
16565 result = tree_int_cst_equal (op0, op1);
16566 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16567 result = INT_CST_LT_UNSIGNED (op0, op1);
16568 else
16569 result = INT_CST_LT (op0, op1);
16571 else
16572 return NULL_TREE;
16574 if (invert)
16575 result ^= 1;
16576 return constant_boolean_node (result, type);
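/* Illustrative sketch (editorial, not part of GCC): the swap/invert
   scheme above, reduced to plain integers.  Only "less than" and
   "equal" are computed directly; the remaining comparisons are derived
   exactly as the comment before the swap describes:  */
#if 0
#include <assert.h>

static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }

static void
example_derive_comparisons (int a, int b)
{
  assert ((a >  b) == lt (b, a));     /* GT: swap the arguments, do LT */
  assert ((a >= b) == !lt (a, b));    /* GE: do LT and invert */
  assert ((a <= b) == !lt (b, a));    /* LE: swap, do LT and invert */
  assert ((a != b) == !eq (a, b));    /* NE: do EQ and invert */
}
#endif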
16579 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16580 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16581 itself. */
16583 tree
16584 fold_build_cleanup_point_expr (tree type, tree expr)
16586 /* If the expression does not have side effects then we don't have to wrap
16587 it with a cleanup point expression. */
16588 if (!TREE_SIDE_EFFECTS (expr))
16589 return expr;
16591 /* If the expression is a RETURN_EXPR, check whether the expression inside
16592 the return, or the right-hand side of the MODIFY_EXPR it contains, is
16593 free of side effects.  If either is, we don't need to wrap the expression
16594 in a cleanup point expression.  Note we don't check the left-hand side of
16595 the modify because it should always be the return decl. */
16596 if (TREE_CODE (expr) == RETURN_EXPR)
16598 tree op = TREE_OPERAND (expr, 0);
16599 if (!op || !TREE_SIDE_EFFECTS (op))
16600 return expr;
16601 op = TREE_OPERAND (op, 1);
16602 if (!TREE_SIDE_EFFECTS (op))
16603 return expr;
16606 return build1 (CLEANUP_POINT_EXPR, type, expr);
16609 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16610 of an indirection through OP0, or NULL_TREE if no simplification is
16611 possible. */
16613 tree
16614 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16616 tree sub = op0;
16617 tree subtype;
16619 STRIP_NOPS (sub);
16620 subtype = TREE_TYPE (sub);
16621 if (!POINTER_TYPE_P (subtype))
16622 return NULL_TREE;
16624 if (TREE_CODE (sub) == ADDR_EXPR)
16626 tree op = TREE_OPERAND (sub, 0);
16627 tree optype = TREE_TYPE (op);
16628 /* *&CONST_DECL -> to the value of the const decl. */
16629 if (TREE_CODE (op) == CONST_DECL)
16630 return DECL_INITIAL (op);
16631 /* *&p => p; make sure to handle *&"str"[cst] here. */
16632 if (type == optype)
16634 tree fop = fold_read_from_constant_string (op);
16635 if (fop)
16636 return fop;
16637 else
16638 return op;
16640 /* *(foo *)&fooarray => fooarray[0] */
16641 else if (TREE_CODE (optype) == ARRAY_TYPE
16642 && type == TREE_TYPE (optype)
16643 && (!in_gimple_form
16644 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16646 tree type_domain = TYPE_DOMAIN (optype);
16647 tree min_val = size_zero_node;
16648 if (type_domain && TYPE_MIN_VALUE (type_domain))
16649 min_val = TYPE_MIN_VALUE (type_domain);
16650 if (in_gimple_form
16651 && TREE_CODE (min_val) != INTEGER_CST)
16652 return NULL_TREE;
16653 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16654 NULL_TREE, NULL_TREE);
16656 /* *(foo *)&complexfoo => __real__ complexfoo */
16657 else if (TREE_CODE (optype) == COMPLEX_TYPE
16658 && type == TREE_TYPE (optype))
16659 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16660 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16661 else if (TREE_CODE (optype) == VECTOR_TYPE
16662 && type == TREE_TYPE (optype))
16664 tree part_width = TYPE_SIZE (type);
16665 tree index = bitsize_int (0);
16666 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16670 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16671 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16673 tree op00 = TREE_OPERAND (sub, 0);
16674 tree op01 = TREE_OPERAND (sub, 1);
16676 STRIP_NOPS (op00);
16677 if (TREE_CODE (op00) == ADDR_EXPR)
16679 tree op00type;
16680 op00 = TREE_OPERAND (op00, 0);
16681 op00type = TREE_TYPE (op00);
16683 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16684 if (TREE_CODE (op00type) == VECTOR_TYPE
16685 && type == TREE_TYPE (op00type))
16687 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16688 tree part_width = TYPE_SIZE (type);
16689 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16690 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16691 tree index = bitsize_int (indexi);
16693 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16694 return fold_build3_loc (loc,
16695 BIT_FIELD_REF, type, op00,
16696 part_width, index);
16699 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16700 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16701 && type == TREE_TYPE (op00type))
16703 tree size = TYPE_SIZE_UNIT (type);
16704 if (tree_int_cst_equal (size, op01))
16705 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16707 /* ((foo *)&fooarray)[1] => fooarray[1] */
16708 else if (TREE_CODE (op00type) == ARRAY_TYPE
16709 && type == TREE_TYPE (op00type))
16711 tree type_domain = TYPE_DOMAIN (op00type);
16712 tree min_val = size_zero_node;
16713 if (type_domain && TYPE_MIN_VALUE (type_domain))
16714 min_val = TYPE_MIN_VALUE (type_domain);
16715 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16716 TYPE_SIZE_UNIT (type));
16717 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16718 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16719 NULL_TREE, NULL_TREE);
16724 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16725 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16726 && type == TREE_TYPE (TREE_TYPE (subtype))
16727 && (!in_gimple_form
16728 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16730 tree type_domain;
16731 tree min_val = size_zero_node;
16732 sub = build_fold_indirect_ref_loc (loc, sub);
16733 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16734 if (type_domain && TYPE_MIN_VALUE (type_domain))
16735 min_val = TYPE_MIN_VALUE (type_domain);
16736 if (in_gimple_form
16737 && TREE_CODE (min_val) != INTEGER_CST)
16738 return NULL_TREE;
16739 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16740 NULL_TREE);
16743 return NULL_TREE;
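/* Illustrative sketch (editorial, not part of GCC): source-level shapes
   of two foldings performed above, *(foo *)&fooarray => fooarray[0] and
   ((foo *)&fooarray)[1] => fooarray[1]:  */
#if 0
#include <assert.h>

static void
example_indirect_ref_folds (void)
{
  int fooarray[4] = { 10, 20, 30, 40 };
  assert (*(int *) &fooarray == fooarray[0]);
  assert (((int *) &fooarray)[1] == fooarray[1]);
}
#endif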
16746 /* Builds an expression for an indirection through T, simplifying some
16747 cases. */
16749 tree
16750 build_fold_indirect_ref_loc (location_t loc, tree t)
16752 tree type = TREE_TYPE (TREE_TYPE (t));
16753 tree sub = fold_indirect_ref_1 (loc, type, t);
16755 if (sub)
16756 return sub;
16758 return build1_loc (loc, INDIRECT_REF, type, t);
16761 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16763 tree
16764 fold_indirect_ref_loc (location_t loc, tree t)
16766 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16768 if (sub)
16769 return sub;
16770 else
16771 return t;
16774 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16775 whose result is ignored. The type of the returned tree need not be
16776 the same as the original expression. */
16778 tree
16779 fold_ignored_result (tree t)
16781 if (!TREE_SIDE_EFFECTS (t))
16782 return integer_zero_node;
16784 for (;;)
16785 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16787 case tcc_unary:
16788 t = TREE_OPERAND (t, 0);
16789 break;
16791 case tcc_binary:
16792 case tcc_comparison:
16793 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16794 t = TREE_OPERAND (t, 0);
16795 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16796 t = TREE_OPERAND (t, 1);
16797 else
16798 return t;
16799 break;
16801 case tcc_expression:
16802 switch (TREE_CODE (t))
16804 case COMPOUND_EXPR:
16805 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16806 return t;
16807 t = TREE_OPERAND (t, 0);
16808 break;
16810 case COND_EXPR:
16811 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16812 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16813 return t;
16814 t = TREE_OPERAND (t, 0);
16815 break;
16817 default:
16818 return t;
16820 break;
16822 default:
16823 return t;
16827 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16828 This can only be applied to objects of a sizetype. */
16830 tree
16831 round_up_loc (location_t loc, tree value, int divisor)
16833 tree div = NULL_TREE;
16835 gcc_assert (divisor > 0);
16836 if (divisor == 1)
16837 return value;
16839 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16840 have to do anything.  Only perform this check when VALUE is not a
16841 constant; for a constant the check is more expensive than simply
16842 doing the rounding. */
16843 if (TREE_CODE (value) != INTEGER_CST)
16845 div = build_int_cst (TREE_TYPE (value), divisor);
16847 if (multiple_of_p (TREE_TYPE (value), value, div))
16848 return value;
16851 /* If divisor is a power of two, simplify this to bit manipulation. */
16852 if (divisor == (divisor & -divisor))
16854 if (TREE_CODE (value) == INTEGER_CST)
16856 double_int val = tree_to_double_int (value);
16857 bool overflow_p;
16859 if ((val.low & (divisor - 1)) == 0)
16860 return value;
16862 overflow_p = TREE_OVERFLOW (value);
16863 val.low &= ~(divisor - 1);
16864 val.low += divisor;
16865 if (val.low == 0)
16867 val.high++;
16868 if (val.high == 0)
16869 overflow_p = true;
16872 return force_fit_type_double (TREE_TYPE (value), val,
16873 -1, overflow_p);
16875 else
16877 tree t;
16879 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16880 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16881 t = build_int_cst (TREE_TYPE (value), -divisor);
16882 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16885 else
16887 if (!div)
16888 div = build_int_cst (TREE_TYPE (value), divisor);
16889 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16890 value = size_binop_loc (loc, MULT_EXPR, value, div);
16893 return value;
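/* Illustrative sketch (editorial, not part of GCC): the power-of-two
   fast paths above and in round_down_loc just below, written as the
   classic unsigned bit tricks.  Rounding up adds DIVISOR - 1 before
   clearing the low bits; rounding down just clears them:  */
#if 0
#include <assert.h>

static void
example_round_to_power_of_two (void)
{
  unsigned value = 37, divisor = 8;   /* divisor must be a power of two */
  unsigned up   = (value + divisor - 1) & -divisor;
  unsigned down = value & -divisor;
  assert (up == 40 && down == 32);
}
#endif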
16896 /* Likewise, but round down. */
16898 tree
16899 round_down_loc (location_t loc, tree value, int divisor)
16901 tree div = NULL_TREE;
16903 gcc_assert (divisor > 0);
16904 if (divisor == 1)
16905 return value;
16907 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16908 have to do anything.  Only perform this check when VALUE is not a
16909 constant; for a constant the check is more expensive than simply
16910 doing the rounding. */
16911 if (TREE_CODE (value) != INTEGER_CST)
16913 div = build_int_cst (TREE_TYPE (value), divisor);
16915 if (multiple_of_p (TREE_TYPE (value), value, div))
16916 return value;
16919 /* If divisor is a power of two, simplify this to bit manipulation. */
16920 if (divisor == (divisor & -divisor))
16922 tree t;
16924 t = build_int_cst (TREE_TYPE (value), -divisor);
16925 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16927 else
16929 if (!div)
16930 div = build_int_cst (TREE_TYPE (value), divisor);
16931 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16932 value = size_binop_loc (loc, MULT_EXPR, value, div);
16935 return value;
16938 /* Returns a pointer to the base of the object addressed by EXP and
16939 extracts information about the offset of the access, storing it
16940 in *PBITPOS and *POFFSET. */
16942 static tree
16943 split_address_to_core_and_offset (tree exp,
16944 HOST_WIDE_INT *pbitpos, tree *poffset)
16946 tree core;
16947 enum machine_mode mode;
16948 int unsignedp, volatilep;
16949 HOST_WIDE_INT bitsize;
16950 location_t loc = EXPR_LOCATION (exp);
16952 if (TREE_CODE (exp) == ADDR_EXPR)
16954 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16955 poffset, &mode, &unsignedp, &volatilep,
16956 false);
16957 core = build_fold_addr_expr_loc (loc, core);
16959 else
16961 core = exp;
16962 *pbitpos = 0;
16963 *poffset = NULL_TREE;
16966 return core;
16969 /* Returns true if addresses of E1 and E2 differ by a constant, false
16970 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16972 bool
16973 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16975 tree core1, core2;
16976 HOST_WIDE_INT bitpos1, bitpos2;
16977 tree toffset1, toffset2, tdiff, type;
16979 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16980 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16982 if (bitpos1 % BITS_PER_UNIT != 0
16983 || bitpos2 % BITS_PER_UNIT != 0
16984 || !operand_equal_p (core1, core2, 0))
16985 return false;
16987 if (toffset1 && toffset2)
16989 type = TREE_TYPE (toffset1);
16990 if (type != TREE_TYPE (toffset2))
16991 toffset2 = fold_convert (type, toffset2);
16993 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16994 if (!cst_and_fits_in_hwi (tdiff))
16995 return false;
16997 *diff = int_cst_value (tdiff);
16999 else if (toffset1 || toffset2)
17001 /* If only one of the offsets is non-constant, the difference cannot
17002 be a constant. */
17003 return false;
17005 else
17006 *diff = 0;
17008 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17009 return true;
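/* Illustrative sketch (editorial, not part of GCC): the kind of fact
   ptr_difference_const establishes.  Two addresses with the same core
   (here the array A) whose offsets differ by a constant have a
   compile-time constant difference:  */
#if 0
#include <assert.h>

static void
example_ptr_difference (void)
{
  int a[10];
  assert (&a[5] - &a[2] == 3);   /* DIFF == 3 elements */
}
#endif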
17012 /* Simplify the floating point expression EXP when the sign of the
17013 result is not significant. Return NULL_TREE if no simplification
17014 is possible. */
17016 tree
17017 fold_strip_sign_ops (tree exp)
17019 tree arg0, arg1;
17020 location_t loc = EXPR_LOCATION (exp);
17022 switch (TREE_CODE (exp))
17024 case ABS_EXPR:
17025 case NEGATE_EXPR:
17026 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17027 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17029 case MULT_EXPR:
17030 case RDIV_EXPR:
17031 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17032 return NULL_TREE;
17033 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17034 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17035 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17036 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17037 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17038 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17039 break;
17041 case COMPOUND_EXPR:
17042 arg0 = TREE_OPERAND (exp, 0);
17043 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17044 if (arg1)
17045 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17046 break;
17048 case COND_EXPR:
17049 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17050 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17051 if (arg0 || arg1)
17052 return fold_build3_loc (loc,
17053 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17054 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17055 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17056 break;
17058 case CALL_EXPR:
17060 const enum built_in_function fcode = builtin_mathfn_code (exp);
17061 switch (fcode)
17063 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17064 /* Strip copysign function call, return the 1st argument. */
17065 arg0 = CALL_EXPR_ARG (exp, 0);
17066 arg1 = CALL_EXPR_ARG (exp, 1);
17067 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17069 default:
17070 /* Strip sign ops from the argument of "odd" math functions. */
17071 if (negate_mathfn_p (fcode))
17073 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17074 if (arg0)
17075 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17077 break;
17080 break;
17082 default:
17083 break;
17085 return NULL_TREE;
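/* Illustrative sketch (editorial, not part of GCC): why stripping sign
   operations is safe when only the magnitude of the result matters.
   For an odd function such as sin, negating the argument merely flips
   the sign of the result, so under fabs the negation is invisible
   (exact for any libm whose sin is odd-symmetric, as common ones are):  */
#if 0
#include <assert.h>
#include <math.h>

static void
example_sign_insensitive (void)
{
  double x = 1.25;
  assert (fabs (sin (-x)) == fabs (sin (x)));
}
#endif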