/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
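
/* For illustration only (not part of the original sources): a minimal
   sketch of how a middle-end caller might use these entry points.  The
   surrounding trees are hypothetical.

     tree a = size_int (4);			   sizetype constant 4
     tree b = size_int (8);			   sizetype constant 8
     tree s = size_binop (PLUS_EXPR, a, b);	   folds to size_int (12)
     tree f = fold (some_expr);			   generic simplification

   size_binop asserts that both operands have matching sizetype-like
   types; see int_binop_types_match_p below.  */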
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-ssa.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
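
/* Usage sketch (illustrative, not part of the original file):

     tree q = div_if_zero_remainder (EXACT_DIV_EXPR,
				     size_int (16), size_int (4));
       q is the INTEGER_CST 4
     tree r = div_if_zero_remainder (EXACT_DIV_EXPR,
				     size_int (17), size_int (4));
       r is NULL_TREE, because 17 % 4 != 0.  */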
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
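
/* Typical pairing of the two entry points above (an illustrative
   sketch; the folded expression and statement are hypothetical):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ... decide whether RES will actually be used ...
     fold_undefer_overflow_warnings (res_is_used, some_stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so that a -Wstrict-overflow diagnostic is only emitted when the
   folded result is really kept.  */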
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
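
/* For example, for a 32-bit int the only value this rejects is
   INT_MIN (-2147483648): its negation, 2147483648, is not
   representable, which is exactly the VAL == 1 << (PREC - 1) case
   tested above.  Every other constant, including INT_MAX, negates
   safely.  */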
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
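
/* An illustrative transcript (hypothetical operands A, B and X of
   plain int type):

     negate_expr (a - b)   => b - a	  MINUS_EXPR case above
     negate_expr (cst 1)   => cst -1	  INTEGER_CST case
     negate_expr (x)	   => -x	  no simplification applies,
					  so a fresh NEGATE_EXPR is built.  */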
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
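
/* Decomposition example (illustrative only): for IN = x + 4 and
   CODE == PLUS_EXPR,

     var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

   leaves LIT = 4, CON = 0, MINUS_LIT = 0 and returns VAR = x; for
   IN = x - 4 it instead leaves MINUS_LIT = 4, since with PLUS_EXPR a
   subtracted literal is tracked separately rather than negated.  The
   pieces are put back together by associate_trees below.  */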
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
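
/* A folding sketch (illustrative): with 32-bit int constants,

     int_const_binop (PLUS_EXPR,
		      build_int_cst (integer_type_node, 2),
		      build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, INT_MAX + 1 yields a constant with
   TREE_OVERFLOW set, and division by the zero constant yields
   NULL_TREE (see the ROUND_DIV_EXPR case above).  */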
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!host_integerp (arg2, 1))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
	  unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
	  unsigned HOST_WIDE_INT innerc
	    = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
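
/* A numeric check of the straightforward complex division above
   (illustrative): for a = 1 + 2i and b = 3 + 4i,

     t  = br*br + bi*bi	       = 9 + 16	       = 25
     tr = (ar*br + ai*bi) / t  = (3 + 8) / 25  = 0.44
     ti = (ai*br - ar*bi) / t  = (6 - 4) / 25  = 0.08

   which indeed equals (1 + 2i) / (3 + 4i).  */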
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
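
/* Usage sketch (illustrative): with both arguments of sizetype,

     size_diffop_loc (loc, size_int (4), size_int (12))

   computes 12 - 4 = 8 in the unsigned type, converts to ssizetype and
   negates, returning the ssizetype constant -8; the dance above avoids
   relying on wrap-around of the unsigned subtraction 4 - 12.  */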
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
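
/* The saturating behavior described above, in numbers (illustrative,
   for a 32-bit signed target type):

     (int)  3.9     ->  3		plain truncation
     (int)  4.0e9   ->  2147483647	saturates to INT_MAX, overflow set
     (int) -4.0e9   -> -2147483648	saturates to INT_MIN, overflow set
     (int)  NaN	    ->  0		NaN maps to zero, overflow set.  */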
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
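
/* Dispatch sketch (illustrative): converting an INTEGER_CST to double
   goes through build_real_from_int_cst, while converting the REAL_CST
   5.9 to int with FIX_TRUNC_EXPR goes through
   fold_convert_const_int_from_real and yields the INTEGER_CST 5:

     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node,
			 some_real_cst)   => INTEGER_CST 5  */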
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2065 arg = save_expr (arg);
2066 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2067 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2068 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2073 default:
2074 gcc_unreachable ();
2077 case VECTOR_TYPE:
2078 if (integer_zerop (arg))
2079 return build_zero_vector (type);
2080 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2082 || TREE_CODE (orig) == VECTOR_TYPE);
2083 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2085 case VOID_TYPE:
2086 tem = fold_ignored_result (arg);
2087 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2089 default:
2090 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2091 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2092 gcc_unreachable ();
2094 fold_convert_exit:
2095 protected_set_expr_location_unshare (tem, loc);
2096 return tem;
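/* Illustrative sketch, not part of the original file: converting a
   hypothetical integer-typed expression N to complex double goes
   through the COMPLEX_TYPE case above and pairs the converted value
   with a zero imaginary part.  */
#if 0
tree c = fold_convert_loc (UNKNOWN_LOCATION, complex_double_type_node, n);
/* c is COMPLEX_EXPR <(double) n, 0.0>.  */
#endif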
2099 /* Return false if expr can be assumed not to be an lvalue, true
2100 otherwise. */
2102 static bool
2103 maybe_lvalue_p (const_tree x)
2105 /* We only need to wrap lvalue tree codes. */
2106 switch (TREE_CODE (x))
2108 case VAR_DECL:
2109 case PARM_DECL:
2110 case RESULT_DECL:
2111 case LABEL_DECL:
2112 case FUNCTION_DECL:
2113 case SSA_NAME:
2115 case COMPONENT_REF:
2116 case MEM_REF:
2117 case INDIRECT_REF:
2118 case ARRAY_REF:
2119 case ARRAY_RANGE_REF:
2120 case BIT_FIELD_REF:
2121 case OBJ_TYPE_REF:
2123 case REALPART_EXPR:
2124 case IMAGPART_EXPR:
2125 case PREINCREMENT_EXPR:
2126 case PREDECREMENT_EXPR:
2127 case SAVE_EXPR:
2128 case TRY_CATCH_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case COMPOUND_EXPR:
2131 case MODIFY_EXPR:
2132 case TARGET_EXPR:
2133 case COND_EXPR:
2134 case BIND_EXPR:
2135 break;
2137 default:
2138 /* Assume the worst for front-end tree codes. */
2139 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 break;
2141 return false;
2144 return true;
2147 /* Return an expr equal to X but certainly not valid as an lvalue. */
2149 tree
2150 non_lvalue_loc (location_t loc, tree x)
2152 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2153 us. */
2154 if (in_gimple_form)
2155 return x;
2157 if (! maybe_lvalue_p (x))
2158 return x;
2159 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2162 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2163 Zero means allow extended lvalues. */
2165 int pedantic_lvalues;
2167 /* When pedantic, return an expr equal to X but certainly not valid as a
2168 pedantic lvalue. Otherwise, return X. */
2170 static tree
2171 pedantic_non_lvalue_loc (location_t loc, tree x)
2173 if (pedantic_lvalues)
2174 return non_lvalue_loc (loc, x);
2176 return protected_set_expr_location_unshare (x, loc);
2179 /* Given a tree comparison code, return the code that is the logical inverse.
2180 It is generally not safe to do this for floating-point comparisons, except
2181 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2182 ERROR_MARK in this case. */
2184 enum tree_code
2185 invert_tree_comparison (enum tree_code code, bool honor_nans)
2187 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2188 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2189 return ERROR_MARK;
2191 switch (code)
2193 case EQ_EXPR:
2194 return NE_EXPR;
2195 case NE_EXPR:
2196 return EQ_EXPR;
2197 case GT_EXPR:
2198 return honor_nans ? UNLE_EXPR : LE_EXPR;
2199 case GE_EXPR:
2200 return honor_nans ? UNLT_EXPR : LT_EXPR;
2201 case LT_EXPR:
2202 return honor_nans ? UNGE_EXPR : GE_EXPR;
2203 case LE_EXPR:
2204 return honor_nans ? UNGT_EXPR : GT_EXPR;
2205 case LTGT_EXPR:
2206 return UNEQ_EXPR;
2207 case UNEQ_EXPR:
2208 return LTGT_EXPR;
2209 case UNGT_EXPR:
2210 return LE_EXPR;
2211 case UNGE_EXPR:
2212 return LT_EXPR;
2213 case UNLT_EXPR:
2214 return GE_EXPR;
2215 case UNLE_EXPR:
2216 return GT_EXPR;
2217 case ORDERED_EXPR:
2218 return UNORDERED_EXPR;
2219 case UNORDERED_EXPR:
2220 return ORDERED_EXPR;
2221 default:
2222 gcc_unreachable ();
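/* Illustrative sketch, not part of the original file: assuming
   flag_trapping_math is clear, the inverse depends on whether NaNs
   are honored.  */
#if 0
gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
/* With NaNs, "!(a < b)" must stay true for unordered operands.  */
gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
#endif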
2226 /* Similar, but return the comparison that results if the operands are
2227 swapped. This is safe for floating-point. */
2229 enum tree_code
2230 swap_tree_comparison (enum tree_code code)
2232 switch (code)
2234 case EQ_EXPR:
2235 case NE_EXPR:
2236 case ORDERED_EXPR:
2237 case UNORDERED_EXPR:
2238 case LTGT_EXPR:
2239 case UNEQ_EXPR:
2240 return code;
2241 case GT_EXPR:
2242 return LT_EXPR;
2243 case GE_EXPR:
2244 return LE_EXPR;
2245 case LT_EXPR:
2246 return GT_EXPR;
2247 case LE_EXPR:
2248 return GE_EXPR;
2249 case UNGT_EXPR:
2250 return UNLT_EXPR;
2251 case UNGE_EXPR:
2252 return UNLE_EXPR;
2253 case UNLT_EXPR:
2254 return UNGT_EXPR;
2255 case UNLE_EXPR:
2256 return UNGE_EXPR;
2257 default:
2258 gcc_unreachable ();
2263 /* Convert a comparison tree code from an enum tree_code representation
2264 into a compcode bit-based encoding. This function is the inverse of
2265 compcode_to_comparison. */
2267 static enum comparison_code
2268 comparison_to_compcode (enum tree_code code)
2270 switch (code)
2272 case LT_EXPR:
2273 return COMPCODE_LT;
2274 case EQ_EXPR:
2275 return COMPCODE_EQ;
2276 case LE_EXPR:
2277 return COMPCODE_LE;
2278 case GT_EXPR:
2279 return COMPCODE_GT;
2280 case NE_EXPR:
2281 return COMPCODE_NE;
2282 case GE_EXPR:
2283 return COMPCODE_GE;
2284 case ORDERED_EXPR:
2285 return COMPCODE_ORD;
2286 case UNORDERED_EXPR:
2287 return COMPCODE_UNORD;
2288 case UNLT_EXPR:
2289 return COMPCODE_UNLT;
2290 case UNEQ_EXPR:
2291 return COMPCODE_UNEQ;
2292 case UNLE_EXPR:
2293 return COMPCODE_UNLE;
2294 case UNGT_EXPR:
2295 return COMPCODE_UNGT;
2296 case LTGT_EXPR:
2297 return COMPCODE_LTGT;
2298 case UNGE_EXPR:
2299 return COMPCODE_UNGE;
2300 default:
2301 gcc_unreachable ();
2305 /* Convert a compcode bit-based encoding of a comparison operator back
2306 to GCC's enum tree_code representation. This function is the
2307 inverse of comparison_to_compcode. */
2309 static enum tree_code
2310 compcode_to_comparison (enum comparison_code code)
2312 switch (code)
2314 case COMPCODE_LT:
2315 return LT_EXPR;
2316 case COMPCODE_EQ:
2317 return EQ_EXPR;
2318 case COMPCODE_LE:
2319 return LE_EXPR;
2320 case COMPCODE_GT:
2321 return GT_EXPR;
2322 case COMPCODE_NE:
2323 return NE_EXPR;
2324 case COMPCODE_GE:
2325 return GE_EXPR;
2326 case COMPCODE_ORD:
2327 return ORDERED_EXPR;
2328 case COMPCODE_UNORD:
2329 return UNORDERED_EXPR;
2330 case COMPCODE_UNLT:
2331 return UNLT_EXPR;
2332 case COMPCODE_UNEQ:
2333 return UNEQ_EXPR;
2334 case COMPCODE_UNLE:
2335 return UNLE_EXPR;
2336 case COMPCODE_UNGT:
2337 return UNGT_EXPR;
2338 case COMPCODE_LTGT:
2339 return LTGT_EXPR;
2340 case COMPCODE_UNGE:
2341 return UNGE_EXPR;
2342 default:
2343 gcc_unreachable ();
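/* Illustrative sketch, not part of the original file: the encoding is
   chosen so that the bitwise AND/OR of two codes is the code of the
   AND/OR of the predicates, e.g. "<" OR "==" is "<=".  */
#if 0
int c = comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR);
gcc_assert (compcode_to_comparison ((enum comparison_code) c) == LE_EXPR);
#endif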
2347 /* Return a tree for the comparison which is the combination of
2348 doing the AND or OR (depending on CODE) of the two operations LCODE
2349 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2350 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2351 if this makes the transformation invalid. */
2353 tree
2354 combine_comparisons (location_t loc,
2355 enum tree_code code, enum tree_code lcode,
2356 enum tree_code rcode, tree truth_type,
2357 tree ll_arg, tree lr_arg)
2359 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2360 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2361 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2362 int compcode;
2364 switch (code)
2366 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2367 compcode = lcompcode & rcompcode;
2368 break;
2370 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2371 compcode = lcompcode | rcompcode;
2372 break;
2374 default:
2375 return NULL_TREE;
2378 if (!honor_nans)
2380 /* Eliminate unordered comparisons, as well as LTGT and ORD
2381 which are not used unless the mode has NaNs. */
2382 compcode &= ~COMPCODE_UNORD;
2383 if (compcode == COMPCODE_LTGT)
2384 compcode = COMPCODE_NE;
2385 else if (compcode == COMPCODE_ORD)
2386 compcode = COMPCODE_TRUE;
2388 else if (flag_trapping_math)
2390 /* Check that the original operation and the optimized ones will trap
2391 under the same condition. */
2392 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2393 && (lcompcode != COMPCODE_EQ)
2394 && (lcompcode != COMPCODE_ORD);
2395 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2396 && (rcompcode != COMPCODE_EQ)
2397 && (rcompcode != COMPCODE_ORD);
2398 bool trap = (compcode & COMPCODE_UNORD) == 0
2399 && (compcode != COMPCODE_EQ)
2400 && (compcode != COMPCODE_ORD);
2402 /* In a short-circuited boolean expression the LHS might be
2403 such that the RHS, if evaluated, will never trap. For
2404 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2405 if neither x nor y is NaN. (This is a mixed blessing: for
2406 example, the expression above will never trap, hence
2407 optimizing it to x < y would be invalid). */
2408 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2409 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2410 rtrap = false;
2412 /* If the comparison was short-circuited, and only the RHS
2413 trapped, we may now generate a spurious trap. */
2414 if (rtrap && !ltrap
2415 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2416 return NULL_TREE;
2418 /* If we changed the conditions that cause a trap, we lose. */
2419 if ((ltrap || rtrap) != trap)
2420 return NULL_TREE;
2423 if (compcode == COMPCODE_TRUE)
2424 return constant_boolean_node (true, truth_type);
2425 else if (compcode == COMPCODE_FALSE)
2426 return constant_boolean_node (false, truth_type);
2427 else
2429 enum tree_code tcode;
2431 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2432 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
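/* Illustrative sketch, not part of the original file: for hypothetical
   integer operands A and B, "a < b || a == b" merges into "a <= b".  */
#if 0
tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              boolean_type_node, a, b);
/* t is the tree "a <= b"; NULL_TREE is returned instead when NaNs and
   trapping math would make the merged form trap differently.  */
#endif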
2436 /* Return nonzero if two operands (typically of the same tree node)
2437 are necessarily equal. If either argument has side-effects this
2438 function returns zero. FLAGS modifies behavior as follows:
2440 If OEP_ONLY_CONST is set, only return nonzero for constants.
2441 This function tests whether the operands are indistinguishable;
2442 it does not test whether they are equal using C's == operation.
2443 The distinction is important for IEEE floating point, because
2444 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2445 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2447 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2448 even though it may hold multiple values during a function.
2449 This is because a GCC tree node guarantees that nothing else is
2450 executed between the evaluation of its "operands" (which may often
2451 be evaluated in arbitrary order). Hence if the operands themselves
2452 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2453 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2454 unset means assuming isochronic (or instantaneous) tree equivalence.
2455 Unless comparing arbitrary expression trees, such as from different
2456 statements, this flag can usually be left unset.
2458 If OEP_PURE_SAME is set, then pure functions with identical arguments
2459 are considered the same. It is used when the caller has other ways
2460 to ensure that global memory is unchanged in between. */
2462 int
2463 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2465 /* If either is ERROR_MARK, they aren't equal. */
2466 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2467 || TREE_TYPE (arg0) == error_mark_node
2468 || TREE_TYPE (arg1) == error_mark_node)
2469 return 0;
2471 /* Similarly, if either does not have a type (like a released SSA name),
2472 they aren't equal. */
2473 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2474 return 0;
2476 /* Check equality of integer constants before bailing out due to
2477 precision differences. */
2478 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2479 return tree_int_cst_equal (arg0, arg1);
2481 /* If both types don't have the same signedness, then we can't consider
2482 them equal. We must check this before the STRIP_NOPS calls
2483 because they may change the signedness of the arguments. As pointers
2484 strictly don't have a signedness, require either two pointers or
2485 two non-pointers as well. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2487 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2488 return 0;
2490 /* We cannot consider pointers to different address spaces equal. */
2491 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2492 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2493 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2494 return 0;
2496 /* If both types don't have the same precision, then it is not safe
2497 to strip NOPs. */
2498 if (element_precision (TREE_TYPE (arg0))
2499 != element_precision (TREE_TYPE (arg1)))
2500 return 0;
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2523 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2524 return 0;
2526 /* This is needed for conversions and for COMPONENT_REF.
2527 Might as well play it safe and always test this. */
2528 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2529 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2530 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2531 return 0;
2533 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2534 We don't care about side effects in that case because the SAVE_EXPR
2535 takes care of that for us. In all other cases, two expressions are
2536 equal if they have no side effects. If we have two identical
2537 expressions with side effects that should be treated the same due
2538 to the only side effects being identical SAVE_EXPR's, that will
2539 be detected in the recursive calls below.
2540 If we are taking an invariant address of two identical objects
2541 they are necessarily equal as well. */
2542 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2543 && (TREE_CODE (arg0) == SAVE_EXPR
2544 || (flags & OEP_CONSTANT_ADDRESS_OF)
2545 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2546 return 1;
2548 /* Next handle constant cases, those for which we can return 1 even
2549 if ONLY_CONST is set. */
2550 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2551 switch (TREE_CODE (arg0))
2553 case INTEGER_CST:
2554 return tree_int_cst_equal (arg0, arg1);
2556 case FIXED_CST:
2557 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2558 TREE_FIXED_CST (arg1));
2560 case REAL_CST:
2561 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2562 TREE_REAL_CST (arg1)))
2563 return 1;
2566 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2568 /* If we do not distinguish between signed and unsigned zero,
2569 consider them equal. */
2570 if (real_zerop (arg0) && real_zerop (arg1))
2571 return 1;
2573 return 0;
2575 case VECTOR_CST:
2577 unsigned i;
2579 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2580 return 0;
2582 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2584 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2585 VECTOR_CST_ELT (arg1, i), flags))
2586 return 0;
2588 return 1;
2591 case COMPLEX_CST:
2592 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2593 flags)
2594 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2595 flags));
2597 case STRING_CST:
2598 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2599 && ! memcmp (TREE_STRING_POINTER (arg0),
2600 TREE_STRING_POINTER (arg1),
2601 TREE_STRING_LENGTH (arg0)));
2603 case ADDR_EXPR:
2604 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2605 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2606 ? OEP_CONSTANT_ADDRESS_OF : 0);
2607 default:
2608 break;
2611 if (flags & OEP_ONLY_CONST)
2612 return 0;
2614 /* Define macros to test an operand from arg0 and arg1 for equality and a
2615 variant that allows null and views null as being different from any
2616 non-null value. In the latter case, if either is null, then both
2617 must be; otherwise, do the normal comparison. */
2618 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2619 TREE_OPERAND (arg1, N), flags)
2621 #define OP_SAME_WITH_NULL(N) \
2622 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2623 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2625 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2627 case tcc_unary:
2628 /* Two conversions are equal only if signedness and modes match. */
2629 switch (TREE_CODE (arg0))
2631 CASE_CONVERT:
2632 case FIX_TRUNC_EXPR:
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2634 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636 break;
2637 default:
2638 break;
2641 return OP_SAME (0);
2644 case tcc_comparison:
2645 case tcc_binary:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2649 /* For commutative ops, allow the other order. */
2650 return (commutative_tree_code (TREE_CODE (arg0))
2651 && operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2656 case tcc_reference:
2657 /* If either of the pointer (or reference) expressions we are
2658 dereferencing contains a side effect, these cannot be equal,
2659 but their addresses can be. */
2660 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2661 && (TREE_SIDE_EFFECTS (arg0)
2662 || TREE_SIDE_EFFECTS (arg1)))
2663 return 0;
2665 switch (TREE_CODE (arg0))
2667 case INDIRECT_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 return OP_SAME (0);
2671 case REALPART_EXPR:
2672 case IMAGPART_EXPR:
2673 return OP_SAME (0);
2675 case TARGET_MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal extra operands and then fall through to MEM_REF
2678 handling of the two common operands. */
2679 if (!OP_SAME_WITH_NULL (2)
2680 || !OP_SAME_WITH_NULL (3)
2681 || !OP_SAME_WITH_NULL (4))
2682 return 0;
2683 /* Fallthru. */
2684 case MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal access sizes, and similar pointer types.
2687 We can have incomplete types for array references of
2688 variable-sized arrays from the Fortran frontend
2689 though. Also verify the types are compatible. */
2690 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2691 || (TYPE_SIZE (TREE_TYPE (arg0))
2692 && TYPE_SIZE (TREE_TYPE (arg1))
2693 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2694 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2695 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2696 && alias_ptr_types_compatible_p
2697 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2698 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2699 && OP_SAME (0) && OP_SAME (1));
2701 case ARRAY_REF:
2702 case ARRAY_RANGE_REF:
2703 /* Operands 2 and 3 may be null.
2704 Compare the array index by value first if it is constant, as we
2705 may have different types but the same value here. */
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2710 TREE_OPERAND (arg1, 1))
2711 || OP_SAME (1))
2712 && OP_SAME_WITH_NULL (2)
2713 && OP_SAME_WITH_NULL (3));
2715 case COMPONENT_REF:
2716 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2717 may be NULL when we're called to compare MEM_EXPRs. */
2718 if (!OP_SAME_WITH_NULL (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2723 case BIT_FIELD_REF:
2724 if (!OP_SAME (0))
2725 return 0;
2726 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2727 return OP_SAME (1) && OP_SAME (2);
2729 default:
2730 return 0;
2733 case tcc_expression:
2734 switch (TREE_CODE (arg0))
2736 case ADDR_EXPR:
2737 case TRUTH_NOT_EXPR:
2738 return OP_SAME (0);
2740 case TRUTH_ANDIF_EXPR:
2741 case TRUTH_ORIF_EXPR:
2742 return OP_SAME (0) && OP_SAME (1);
2744 case FMA_EXPR:
2745 case WIDEN_MULT_PLUS_EXPR:
2746 case WIDEN_MULT_MINUS_EXPR:
2747 if (!OP_SAME (2))
2748 return 0;
2749 /* The multiplication operands are commutative. */
2750 /* FALLTHRU */
2752 case TRUTH_AND_EXPR:
2753 case TRUTH_OR_EXPR:
2754 case TRUTH_XOR_EXPR:
2755 if (OP_SAME (0) && OP_SAME (1))
2756 return 1;
2758 /* Otherwise take into account this is a commutative operation. */
2759 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2760 TREE_OPERAND (arg1, 1), flags)
2761 && operand_equal_p (TREE_OPERAND (arg0, 1),
2762 TREE_OPERAND (arg1, 0), flags));
2764 case COND_EXPR:
2765 case VEC_COND_EXPR:
2766 case DOT_PROD_EXPR:
2767 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2769 default:
2770 return 0;
2773 case tcc_vl_exp:
2774 switch (TREE_CODE (arg0))
2776 case CALL_EXPR:
2777 /* If the CALL_EXPRs call different functions, then they
2778 clearly cannot be equal. */
2779 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2780 flags))
2781 return 0;
2784 unsigned int cef = call_expr_flags (arg0);
2785 if (flags & OEP_PURE_SAME)
2786 cef &= ECF_CONST | ECF_PURE;
2787 else
2788 cef &= ECF_CONST;
2789 if (!cef)
2790 return 0;
2793 /* Now see if all the arguments are the same. */
2795 const_call_expr_arg_iterator iter0, iter1;
2796 const_tree a0, a1;
2797 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2798 a1 = first_const_call_expr_arg (arg1, &iter1);
2799 a0 && a1;
2800 a0 = next_const_call_expr_arg (&iter0),
2801 a1 = next_const_call_expr_arg (&iter1))
2802 if (! operand_equal_p (a0, a1, flags))
2803 return 0;
2805 /* If we get here and both argument lists are exhausted
2806 then the CALL_EXPRs are equal. */
2807 return ! (a0 || a1);
2809 default:
2810 return 0;
2813 case tcc_declaration:
2814 /* Consider __builtin_sqrt equal to sqrt. */
2815 return (TREE_CODE (arg0) == FUNCTION_DECL
2816 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2817 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2818 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2820 default:
2821 return 0;
2824 #undef OP_SAME
2825 #undef OP_SAME_WITH_NULL
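/* Illustrative sketch, not part of the original file: two separately
   built trees for "x + 1" (X a hypothetical side-effect-free VAR_DECL)
   compare equal structurally even though the pointers differ.  */
#if 0
tree x1 = build2 (PLUS_EXPR, integer_type_node, x, integer_one_node);
tree x2 = build2 (PLUS_EXPR, integer_type_node, x, integer_one_node);
gcc_assert (operand_equal_p (x1, x2, 0));
gcc_assert (!operand_equal_p (x1, x2, OEP_ONLY_CONST)); /* not constants */
#endif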
2828 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2829 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2831 When in doubt, return 0. */
2833 static int
2834 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2836 int unsignedp1, unsignedpo;
2837 tree primarg0, primarg1, primother;
2838 unsigned int correct_width;
2840 if (operand_equal_p (arg0, arg1, 0))
2841 return 1;
2843 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2844 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2845 return 0;
2847 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2848 and see if the inner values are the same. This removes any
2849 signedness comparison, which doesn't matter here. */
2850 primarg0 = arg0, primarg1 = arg1;
2851 STRIP_NOPS (primarg0);
2852 STRIP_NOPS (primarg1);
2853 if (operand_equal_p (primarg0, primarg1, 0))
2854 return 1;
2856 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2857 actual comparison operand, ARG0.
2859 First throw away any conversions to wider types
2860 already present in the operands. */
2862 primarg1 = get_narrower (arg1, &unsignedp1);
2863 primother = get_narrower (other, &unsignedpo);
2865 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2866 if (unsignedp1 == unsignedpo
2867 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2868 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2870 tree type = TREE_TYPE (arg0);
2872 /* Make sure shorter operand is extended the right way
2873 to match the longer operand. */
2874 primarg1 = fold_convert (signed_or_unsigned_type_for
2875 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2877 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2878 return 1;
2881 return 0;
2884 /* See if ARG is an expression that is either a comparison or is performing
2885 arithmetic on comparisons. The comparisons must only be comparing
2886 two different values, which will be stored in *CVAL1 and *CVAL2; if
2887 they are nonzero it means that some operands have already been found.
2888 No variables may be used anywhere else in the expression except in the
2889 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2890 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2892 If this is true, return 1. Otherwise, return zero. */
2894 static int
2895 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2897 enum tree_code code = TREE_CODE (arg);
2898 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2900 /* We can handle some of the tcc_expression cases here. */
2901 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2902 tclass = tcc_unary;
2903 else if (tclass == tcc_expression
2904 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2905 || code == COMPOUND_EXPR))
2906 tclass = tcc_binary;
2908 else if (tclass == tcc_expression && code == SAVE_EXPR
2909 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2911 /* If we've already found a CVAL1 or CVAL2, this expression is
2912 too complex to handle. */
2913 if (*cval1 || *cval2)
2914 return 0;
2916 tclass = tcc_unary;
2917 *save_p = 1;
2920 switch (tclass)
2922 case tcc_unary:
2923 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2925 case tcc_binary:
2926 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2927 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2928 cval1, cval2, save_p));
2930 case tcc_constant:
2931 return 1;
2933 case tcc_expression:
2934 if (code == COND_EXPR)
2935 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2936 cval1, cval2, save_p)
2937 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2938 cval1, cval2, save_p)
2939 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2940 cval1, cval2, save_p));
2941 return 0;
2943 case tcc_comparison:
2944 /* First see if we can handle the first operand, then the second. For
2945 the second operand, we know *CVAL1 can't be zero. It must be that
2946 one side of the comparison is each of the values; test for the
2947 case where this isn't true by failing if the two operands
2948 are the same. */
2950 if (operand_equal_p (TREE_OPERAND (arg, 0),
2951 TREE_OPERAND (arg, 1), 0))
2952 return 0;
2954 if (*cval1 == 0)
2955 *cval1 = TREE_OPERAND (arg, 0);
2956 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2957 ;
2958 else if (*cval2 == 0)
2959 *cval2 = TREE_OPERAND (arg, 0);
2960 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2961 ;
2962 else
2963 return 0;
2965 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2966 ;
2967 else if (*cval2 == 0)
2968 *cval2 = TREE_OPERAND (arg, 1);
2969 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2970 ;
2971 else
2972 return 0;
2974 return 1;
2976 default:
2977 return 0;
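/* Illustrative sketch, not part of the original file: for a
   hypothetical ARG of the form "(a < b) || (a == b)", the two compared
   values are collected into CVAL1 and CVAL2.  */
#if 0
tree cval1 = NULL_TREE, cval2 = NULL_TREE;
int save_p = 0;
if (twoval_comparison_p (arg, &cval1, &cval2, &save_p))
  gcc_assert (cval1 == a && cval2 == b);
#endif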
2981 /* ARG is a tree that is known to contain just arithmetic operations and
2982 comparisons. Evaluate the operations in the tree substituting NEW0 for
2983 any occurrence of OLD0 as an operand of a comparison and likewise for
2984 NEW1 and OLD1. */
2986 static tree
2987 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2988 tree old1, tree new1)
2990 tree type = TREE_TYPE (arg);
2991 enum tree_code code = TREE_CODE (arg);
2992 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2994 /* We can handle some of the tcc_expression cases here. */
2995 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2996 tclass = tcc_unary;
2997 else if (tclass == tcc_expression
2998 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2999 tclass = tcc_binary;
3001 switch (tclass)
3003 case tcc_unary:
3004 return fold_build1_loc (loc, code, type,
3005 eval_subst (loc, TREE_OPERAND (arg, 0),
3006 old0, new0, old1, new1));
3008 case tcc_binary:
3009 return fold_build2_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1));
3015 case tcc_expression:
3016 switch (code)
3018 case SAVE_EXPR:
3019 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3020 old1, new1);
3022 case COMPOUND_EXPR:
3023 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3024 old1, new1);
3026 case COND_EXPR:
3027 return fold_build3_loc (loc, code, type,
3028 eval_subst (loc, TREE_OPERAND (arg, 0),
3029 old0, new0, old1, new1),
3030 eval_subst (loc, TREE_OPERAND (arg, 1),
3031 old0, new0, old1, new1),
3032 eval_subst (loc, TREE_OPERAND (arg, 2),
3033 old0, new0, old1, new1));
3034 default:
3035 break;
3037 /* Fall through - ??? */
3039 case tcc_comparison:
3041 tree arg0 = TREE_OPERAND (arg, 0);
3042 tree arg1 = TREE_OPERAND (arg, 1);
3044 /* We need to check both for exact equality and tree equality. The
3045 former will be true if the operand has a side-effect. In that
3046 case, we know the operand occurred exactly once. */
3048 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3049 arg0 = new0;
3050 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3051 arg0 = new1;
3053 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3054 arg1 = new0;
3055 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3056 arg1 = new1;
3058 return fold_build2_loc (loc, code, type, arg0, arg1);
3061 default:
3062 return arg;
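/* Illustrative sketch, not part of the original file: with hypothetical
   trees, substituting NEW0 for A and NEW1 for B inside
   "(a < b) && (a == b)" rebuilds it as "(new0 < new1) && (new0 == new1)".  */
#if 0
tree t = eval_subst (loc, arg, a, new0, b, new1);
#endif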
3066 /* Return a tree for the case when the result of an expression is RESULT
3067 converted to TYPE and OMITTED was previously an operand of the expression
3068 but is now not needed (e.g., we folded OMITTED * 0).
3070 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3071 the conversion of RESULT to TYPE. */
3073 tree
3074 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3076 tree t = fold_convert_loc (loc, type, result);
3078 /* If the resulting operand is an empty statement, just return the omitted
3079 statement cast to void. */
3080 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3081 return build1_loc (loc, NOP_EXPR, void_type_node,
3082 fold_ignored_result (omitted));
3084 if (TREE_SIDE_EFFECTS (omitted))
3085 return build2_loc (loc, COMPOUND_EXPR, type,
3086 fold_ignored_result (omitted), t);
3088 return non_lvalue_loc (loc, t);
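/* Illustrative sketch, not part of the original file: when "f () * 0"
   folds to zero, the call must still be evaluated for its side effects
   (CALL is a hypothetical CALL_EXPR).  */
#if 0
tree t = omit_one_operand_loc (loc, integer_type_node,
                               integer_zero_node, call);
/* t is COMPOUND_EXPR <f (), 0>; without side effects it would be 0.  */
#endif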
3091 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3093 static tree
3094 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3095 tree omitted)
3097 tree t = fold_convert_loc (loc, type, result);
3099 /* If the resulting operand is an empty statement, just return the omitted
3100 statement cast to void. */
3101 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3102 return build1_loc (loc, NOP_EXPR, void_type_node,
3103 fold_ignored_result (omitted));
3105 if (TREE_SIDE_EFFECTS (omitted))
3106 return build2_loc (loc, COMPOUND_EXPR, type,
3107 fold_ignored_result (omitted), t);
3109 return pedantic_non_lvalue_loc (loc, t);
3112 /* Return a tree for the case when the result of an expression is RESULT
3113 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3114 of the expression but are now not needed.
3116 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3117 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3118 evaluated before OMITTED2. Otherwise, if neither has side effects,
3119 just do the conversion of RESULT to TYPE. */
3121 tree
3122 omit_two_operands_loc (location_t loc, tree type, tree result,
3123 tree omitted1, tree omitted2)
3125 tree t = fold_convert_loc (loc, type, result);
3127 if (TREE_SIDE_EFFECTS (omitted2))
3128 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3129 if (TREE_SIDE_EFFECTS (omitted1))
3130 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3132 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3136 /* Return a simplified tree node for the truth-negation of ARG. This
3137 never alters ARG itself. We assume that ARG is an operation that
3138 returns a truth value (0 or 1).
3140 FIXME: one would think we would fold the result, but it causes
3141 problems with the dominator optimizer. */
3143 static tree
3144 fold_truth_not_expr (location_t loc, tree arg)
3146 tree type = TREE_TYPE (arg);
3147 enum tree_code code = TREE_CODE (arg);
3148 location_t loc1, loc2;
3150 /* If this is a comparison, we can simply invert it, except for
3151 floating-point non-equality comparisons, in which case we just
3152 enclose a TRUTH_NOT_EXPR around what we have. */
3154 if (TREE_CODE_CLASS (code) == tcc_comparison)
3156 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3157 if (FLOAT_TYPE_P (op_type)
3158 && flag_trapping_math
3159 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3160 && code != NE_EXPR && code != EQ_EXPR)
3161 return NULL_TREE;
3163 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3164 if (code == ERROR_MARK)
3165 return NULL_TREE;
3167 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3168 TREE_OPERAND (arg, 1));
3171 switch (code)
3173 case INTEGER_CST:
3174 return constant_boolean_node (integer_zerop (arg), type);
3176 case TRUTH_AND_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_OR_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183 case TRUTH_OR_EXPR:
3184 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3185 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3186 return build2_loc (loc, TRUTH_AND_EXPR, type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3188 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3190 case TRUTH_XOR_EXPR:
3191 /* Here we can invert either operand. We invert the first operand
3192 unless the second operand is a TRUTH_NOT_EXPR in which case our
3193 result is the XOR of the first operand with the inside of the
3194 negation of the second operand. */
3196 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3197 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3198 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3199 else
3200 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3201 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3202 TREE_OPERAND (arg, 1));
3204 case TRUTH_ANDIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211 case TRUTH_ORIF_EXPR:
3212 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3213 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3214 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3215 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3216 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3218 case TRUTH_NOT_EXPR:
3219 return TREE_OPERAND (arg, 0);
3221 case COND_EXPR:
3223 tree arg1 = TREE_OPERAND (arg, 1);
3224 tree arg2 = TREE_OPERAND (arg, 2);
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3229 /* A COND_EXPR may have a throw as one operand, which
3230 then has void type. Just leave void operands
3231 as they are. */
3232 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3233 VOID_TYPE_P (TREE_TYPE (arg1))
3234 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3235 VOID_TYPE_P (TREE_TYPE (arg2))
3236 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3239 case COMPOUND_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3241 return build2_loc (loc, COMPOUND_EXPR, type,
3242 TREE_OPERAND (arg, 0),
3243 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3245 case NON_LVALUE_EXPR:
3246 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3247 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3249 CASE_CONVERT:
3250 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3251 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3253 /* ... fall through ... */
3255 case FLOAT_EXPR:
3256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3257 return build1_loc (loc, TREE_CODE (arg), type,
3258 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3260 case BIT_AND_EXPR:
3261 if (!integer_onep (TREE_OPERAND (arg, 1)))
3262 return NULL_TREE;
3263 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3265 case SAVE_EXPR:
3266 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3268 case CLEANUP_POINT_EXPR:
3269 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3270 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3271 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3273 default:
3274 return NULL_TREE;
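/* Illustrative sketch, not part of the original file: negating a
   hypothetical integer comparison "a < b" yields "a >= b", while a
   floating-point "<" under -ftrapping-math returns NULL_TREE and the
   caller falls back to wrapping a TRUTH_NOT_EXPR.  */
#if 0
tree t = fold_truth_not_expr (loc, cmp);  /* cmp = "a < b" */
#endif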
3278 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3279 assume that ARG is an operation that returns a truth value (0 or 1
3280 for scalars, 0 or -1 for vectors). Return the folded expression if
3281 folding is successful. Otherwise, return NULL_TREE. */
3283 static tree
3284 fold_invert_truthvalue (location_t loc, tree arg)
3286 tree type = TREE_TYPE (arg);
3287 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3288 ? BIT_NOT_EXPR
3289 : TRUTH_NOT_EXPR,
3290 type, arg);
3293 /* Return a simplified tree node for the truth-negation of ARG. This
3294 never alters ARG itself. We assume that ARG is an operation that
3295 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3297 tree
3298 invert_truthvalue_loc (location_t loc, tree arg)
3300 if (TREE_CODE (arg) == ERROR_MARK)
3301 return arg;
3303 tree type = TREE_TYPE (arg);
3304 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3305 ? BIT_NOT_EXPR
3306 : TRUTH_NOT_EXPR,
3307 type, arg);
3310 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3311 operands are another bit-wise operation with a common input. If so,
3312 distribute the bit operations to save an operation and possibly two if
3313 constants are involved. For example, convert
3314 (A | B) & (A | C) into A | (B & C)
3315 Further simplification will occur if B and C are constants.
3317 If this optimization cannot be done, 0 will be returned. */
3319 static tree
3320 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3321 tree arg0, tree arg1)
3323 tree common;
3324 tree left, right;
3326 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3327 || TREE_CODE (arg0) == code
3328 || (TREE_CODE (arg0) != BIT_AND_EXPR
3329 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3330 return 0;
3332 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3334 common = TREE_OPERAND (arg0, 0);
3335 left = TREE_OPERAND (arg0, 1);
3336 right = TREE_OPERAND (arg1, 1);
3338 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3340 common = TREE_OPERAND (arg0, 0);
3341 left = TREE_OPERAND (arg0, 1);
3342 right = TREE_OPERAND (arg1, 0);
3344 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3346 common = TREE_OPERAND (arg0, 1);
3347 left = TREE_OPERAND (arg0, 0);
3348 right = TREE_OPERAND (arg1, 1);
3350 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3352 common = TREE_OPERAND (arg0, 1);
3353 left = TREE_OPERAND (arg0, 0);
3354 right = TREE_OPERAND (arg1, 0);
3356 else
3357 return 0;
3359 common = fold_convert_loc (loc, type, common);
3360 left = fold_convert_loc (loc, type, left);
3361 right = fold_convert_loc (loc, type, right);
3362 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3363 fold_build2_loc (loc, code, type, left, right));
3366 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3367 with code CODE. This optimization is unsafe. */
3368 static tree
3369 distribute_real_division (location_t loc, enum tree_code code, tree type,
3370 tree arg0, tree arg1)
3372 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3373 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3375 /* (A / C) +- (B / C) -> (A +- B) / C. */
3376 if (mul0 == mul1
3377 && operand_equal_p (TREE_OPERAND (arg0, 1),
3378 TREE_OPERAND (arg1, 1), 0))
3379 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3380 fold_build2_loc (loc, code, type,
3381 TREE_OPERAND (arg0, 0),
3382 TREE_OPERAND (arg1, 0)),
3383 TREE_OPERAND (arg0, 1));
3385 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3386 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3387 TREE_OPERAND (arg1, 0), 0)
3388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3389 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3391 REAL_VALUE_TYPE r0, r1;
3392 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3393 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3394 if (!mul0)
3395 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3396 if (!mul1)
3397 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3398 real_arithmetic (&r0, code, &r0, &r1);
3399 return fold_build2_loc (loc, MULT_EXPR, type,
3400 TREE_OPERAND (arg0, 0),
3401 build_real (type, r0));
3404 return NULL_TREE;
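/* Illustrative sketch, not part of the original file: with hypothetical
   ARG0 = "a / 2.0" and ARG1 = "a / 4.0" (A free of side effects), the
   second pattern rewrites the PLUS_EXPR as "a * 0.75", trading two
   divisions for one multiplication; exact IEEE results may differ,
   which is why the transform is flagged unsafe.  */
#if 0
tree t = distribute_real_division (loc, PLUS_EXPR, double_type_node,
                                   arg0, arg1);
#endif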
3407 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3408 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3410 static tree
3411 make_bit_field_ref (location_t loc, tree inner, tree type,
3412 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3414 tree result, bftype;
3416 if (bitpos == 0)
3418 tree size = TYPE_SIZE (TREE_TYPE (inner));
3419 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3420 || POINTER_TYPE_P (TREE_TYPE (inner)))
3421 && host_integerp (size, 0)
3422 && tree_low_cst (size, 0) == bitsize)
3423 return fold_convert_loc (loc, type, inner);
3426 bftype = type;
3427 if (TYPE_PRECISION (bftype) != bitsize
3428 || TYPE_UNSIGNED (bftype) == !unsignedp)
3429 bftype = build_nonstandard_integer_type (bitsize, 0);
3431 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3432 size_int (bitsize), bitsize_int (bitpos));
3434 if (bftype != type)
3435 result = fold_convert_loc (loc, type, result);
3437 return result;
3440 /* Optimize a bit-field compare.
3442 There are two cases: First is a compare against a constant and the
3443 second is a comparison of two items where the fields are at the same
3444 bit position relative to the start of a chunk (byte, halfword, word)
3445 large enough to contain it. In these cases we can avoid the shift
3446 implicit in bitfield extractions.
3448 For constants, we emit a compare of the shifted constant with the
3449 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3450 compared. For two fields at the same position, we do the ANDs with the
3451 similar mask and compare the result of the ANDs.
3453 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3454 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3455 are the left and right operands of the comparison, respectively.
3457 If the optimization described above can be done, we return the resulting
3458 tree. Otherwise we return zero. */
3460 static tree
3461 optimize_bit_field_compare (location_t loc, enum tree_code code,
3462 tree compare_type, tree lhs, tree rhs)
3464 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3465 tree type = TREE_TYPE (lhs);
3466 tree signed_type, unsigned_type;
3467 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3468 enum machine_mode lmode, rmode, nmode;
3469 int lunsignedp, runsignedp;
3470 int lvolatilep = 0, rvolatilep = 0;
3471 tree linner, rinner = NULL_TREE;
3472 tree mask;
3473 tree offset;
3475 /* In the strict volatile bitfields case, doing code changes here may prevent
3476 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3477 if (flag_strict_volatile_bitfields > 0)
3478 return 0;
3480 /* Get all the information about the extractions being done. If the bit size
3481 is the same as the size of the underlying object, we aren't doing an
3482 extraction at all and so can do nothing. We also don't want to
3483 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3484 then will no longer be able to replace it. */
3485 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3486 &lunsignedp, &lvolatilep, false);
3487 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3488 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3489 return 0;
3491 if (!const_p)
3493 /* If this is not a constant, we can only do something if bit positions,
3494 sizes, and signedness are the same. */
3495 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3496 &runsignedp, &rvolatilep, false);
3498 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3499 || lunsignedp != runsignedp || offset != 0
3500 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3501 return 0;
3504 /* See if we can find a mode to refer to this field. We should be able to,
3505 but fail if we can't. */
3506 if (lvolatilep
3507 && GET_MODE_BITSIZE (lmode) > 0
3508 && flag_strict_volatile_bitfields > 0)
3509 nmode = lmode;
3510 else
3511 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3512 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3513 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3514 TYPE_ALIGN (TREE_TYPE (rinner))),
3515 word_mode, lvolatilep || rvolatilep);
3516 if (nmode == VOIDmode)
3517 return 0;
3519 /* Set signed and unsigned types of the precision of this mode for the
3520 shifts below. */
3521 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3522 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3524 /* Compute the bit position and size for the new reference and our offset
3525 within it. If the new reference is the same size as the original, we
3526 won't optimize anything, so return zero. */
3527 nbitsize = GET_MODE_BITSIZE (nmode);
3528 nbitpos = lbitpos & ~ (nbitsize - 1);
3529 lbitpos -= nbitpos;
3530 if (nbitsize == lbitsize)
3531 return 0;
3533 if (BYTES_BIG_ENDIAN)
3534 lbitpos = nbitsize - lbitsize - lbitpos;
3536 /* Make the mask to be used against the extracted field. */
3537 mask = build_int_cst_type (unsigned_type, -1);
3538 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3539 mask = const_binop (RSHIFT_EXPR, mask,
3540 size_int (nbitsize - lbitsize - lbitpos));
3542 if (! const_p)
3543 /* If not comparing with a constant, just rework the comparison
3544 and return. */
3545 return fold_build2_loc (loc, code, compare_type,
3546 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3547 make_bit_field_ref (loc, linner,
3548 unsigned_type,
3549 nbitsize, nbitpos,
3550 1),
3551 mask),
3552 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3553 make_bit_field_ref (loc, rinner,
3554 unsigned_type,
3555 nbitsize, nbitpos,
3556 1),
3557 mask));
3559 /* Otherwise, we are handling the constant case. See if the constant is too
3560 big for the field. Warn and return a tree for 0 (false) if so. We do
3561 this not only for its own sake, but to avoid having to test for this
3562 error case below. If we didn't, we might generate wrong code.
3564 For unsigned fields, the constant shifted right by the field length should
3565 be all zero. For signed fields, the high-order bits should agree with
3566 the sign bit. */
3568 if (lunsignedp)
3570 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3571 fold_convert_loc (loc,
3572 unsigned_type, rhs),
3573 size_int (lbitsize))))
3575 warning (0, "comparison is always %d due to width of bit-field",
3576 code == NE_EXPR);
3577 return constant_boolean_node (code == NE_EXPR, compare_type);
3580 else
3582 tree tem = const_binop (RSHIFT_EXPR,
3583 fold_convert_loc (loc, signed_type, rhs),
3584 size_int (lbitsize - 1));
3585 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3587 warning (0, "comparison is always %d due to width of bit-field",
3588 code == NE_EXPR);
3589 return constant_boolean_node (code == NE_EXPR, compare_type);
3593 /* Single-bit compares should always be against zero. */
3594 if (lbitsize == 1 && ! integer_zerop (rhs))
3596 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3597 rhs = build_int_cst (type, 0);
3600 /* Make a new bitfield reference, shift the constant over the
3601 appropriate number of bits and mask it with the computed mask
3602 (in case this was a signed field). If we changed it, make a new one. */
3603 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3604 if (lvolatilep)
3606 TREE_SIDE_EFFECTS (lhs) = 1;
3607 TREE_THIS_VOLATILE (lhs) = 1;
3610 rhs = const_binop (BIT_AND_EXPR,
3611 const_binop (LSHIFT_EXPR,
3612 fold_convert_loc (loc, unsigned_type, rhs),
3613 size_int (lbitpos)),
3614 mask);
3616 lhs = build2_loc (loc, code, compare_type,
3617 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3618 return lhs;
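/* Illustrative sketch, not part of the original file: for a hypothetical
   struct S { unsigned a : 2; unsigned f : 4; } s; the comparison
   "s.f == 3" is rewritten on a !BYTES_BIG_ENDIAN target as roughly

     (BIT_FIELD_REF <s, 8, 0> & 0x3c) == 0x0c

   i.e. a mask-and-compare on a QImode chunk; the constant is pre-shifted
   by the field position (3 << 2, masked) instead of shifting the
   extracted field.  */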
3621 /* Subroutine for fold_truth_andor_1: decode a field reference.
3623 If EXP is a comparison reference, we return the innermost reference.
3625 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3626 set to the starting bit number.
3628 If the innermost field can be completely contained in a mode-sized
3629 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3631 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3632 otherwise it is not changed.
3634 *PUNSIGNEDP is set to the signedness of the field.
3636 *PMASK is set to the mask used. This is either contained in a
3637 BIT_AND_EXPR or derived from the width of the field.
3639 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3641 Return 0 if this is not a component reference or is one that we can't
3642 do anything with. */
3644 static tree
3645 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3646 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3647 int *punsignedp, int *pvolatilep,
3648 tree *pmask, tree *pand_mask)
3650 tree outer_type = 0;
3651 tree and_mask = 0;
3652 tree mask, inner, offset;
3653 tree unsigned_type;
3654 unsigned int precision;
3656 /* All the optimizations using this function assume integer fields.
3657 There are problems with FP fields since the type_for_size call
3658 below can fail for, e.g., XFmode. */
3659 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3660 return 0;
3662 /* We are interested in the bare arrangement of bits, so strip everything
3663 that doesn't affect the machine mode. However, record the type of the
3664 outermost expression if it may matter below. */
3665 if (CONVERT_EXPR_P (exp)
3666 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3667 outer_type = TREE_TYPE (exp);
3668 STRIP_NOPS (exp);
3670 if (TREE_CODE (exp) == BIT_AND_EXPR)
3672 and_mask = TREE_OPERAND (exp, 1);
3673 exp = TREE_OPERAND (exp, 0);
3674 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3675 if (TREE_CODE (and_mask) != INTEGER_CST)
3676 return 0;
3679 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3680 punsignedp, pvolatilep, false);
3681 if ((inner == exp && and_mask == 0)
3682 || *pbitsize < 0 || offset != 0
3683 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3684 return 0;
3686 /* If the number of bits in the reference is the same as the bitsize of
3687 the outer type, then the outer type gives the signedness. Otherwise
3688 (in case of a small bitfield) the signedness is unchanged. */
3689 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3690 *punsignedp = TYPE_UNSIGNED (outer_type);
3692 /* Compute the mask to access the bitfield. */
3693 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3694 precision = TYPE_PRECISION (unsigned_type);
3696 mask = build_int_cst_type (unsigned_type, -1);
3698 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3699 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3701 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3702 if (and_mask != 0)
3703 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3704 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3706 *pmask = mask;
3707 *pand_mask = and_mask;
3708 return inner;
3711 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3712 bit positions. */
3714 static int
3715 all_ones_mask_p (const_tree mask, int size)
3717 tree type = TREE_TYPE (mask);
3718 unsigned int precision = TYPE_PRECISION (type);
3719 tree tmask;
3721 tmask = build_int_cst_type (signed_type_for (type), -1);
3723 return
3724 tree_int_cst_equal (mask,
3725 const_binop (RSHIFT_EXPR,
3726 const_binop (LSHIFT_EXPR, tmask,
3727 size_int (precision - size)),
3728 size_int (precision - size)));
3731 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3732 represents the sign bit of EXP's type. If EXP represents a sign
3733 or zero extension, also test VAL against the unextended type.
3734 The return value is the (sub)expression whose sign bit is VAL,
3735 or NULL_TREE otherwise. */
3737 static tree
3738 sign_bit_p (tree exp, const_tree val)
3740 unsigned HOST_WIDE_INT mask_lo, lo;
3741 HOST_WIDE_INT mask_hi, hi;
3742 int width;
3743 tree t;
3745 /* Tree EXP must have an integral type. */
3746 t = TREE_TYPE (exp);
3747 if (! INTEGRAL_TYPE_P (t))
3748 return NULL_TREE;
3750 /* Tree VAL must be an integer constant. */
3751 if (TREE_CODE (val) != INTEGER_CST
3752 || TREE_OVERFLOW (val))
3753 return NULL_TREE;
3755 width = TYPE_PRECISION (t);
3756 if (width > HOST_BITS_PER_WIDE_INT)
3758 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3759 lo = 0;
3761 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3762 mask_lo = -1;
3764 else
3766 hi = 0;
3767 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3769 mask_hi = 0;
3770 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3773 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3774 treat VAL as if it were unsigned. */
3775 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3776 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3777 return exp;
3779 /* Handle extension from a narrower type. */
3780 if (TREE_CODE (exp) == NOP_EXPR
3781 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3782 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3784 return NULL_TREE;
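/* Editor's illustration, not part of GCC: for a WIDTH-bit type the
   sign bit is the single bit 1 << (WIDTH - 1), e.g. sign_bit (8)
   == 0x80.  sign_bit_p above checks VAL against exactly this
   constant, split across two HOST_WIDE_INTs when WIDTH exceeds one
   host word.  Assumes 1 <= width <= 64.  */

static unsigned long long
sign_bit (int width)
{
  return 1ULL << (width - 1);
}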
3787 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3788 to be evaluated unconditionally. */
3790 static int
3791 simple_operand_p (const_tree exp)
3793 /* Strip any conversions that don't change the machine mode. */
3794 STRIP_NOPS (exp);
3796 return (CONSTANT_CLASS_P (exp)
3797 || TREE_CODE (exp) == SSA_NAME
3798 || (DECL_P (exp)
3799 && ! TREE_ADDRESSABLE (exp)
3800 && ! TREE_THIS_VOLATILE (exp)
3801 && ! DECL_NONLOCAL (exp)
3802 /* Don't regard global variables as simple. They may be
3803 allocated in ways unknown to the compiler (shared memory,
3804 #pragma weak, etc). */
3805 && ! TREE_PUBLIC (exp)
3806 && ! DECL_EXTERNAL (exp)
3807 /* Weakrefs are not safe to be read, since they can be NULL.
3808 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3809 have DECL_WEAK flag set. */
3810 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3811 /* Loading a static variable is unduly expensive, but global
3812 registers aren't expensive. */
3813 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3816 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3817 to be evaluated unconditionally.
3818 In addition to simple_operand_p, we also treat comparisons, conversions,
3819 and logical-not operations as simple, provided their operands are simple too. */
3821 static bool
3822 simple_operand_p_2 (tree exp)
3824 enum tree_code code;
3826 if (TREE_SIDE_EFFECTS (exp)
3827 || tree_could_trap_p (exp))
3828 return false;
3830 while (CONVERT_EXPR_P (exp))
3831 exp = TREE_OPERAND (exp, 0);
3833 code = TREE_CODE (exp);
3835 if (TREE_CODE_CLASS (code) == tcc_comparison)
3836 return (simple_operand_p (TREE_OPERAND (exp, 0))
3837 && simple_operand_p (TREE_OPERAND (exp, 1)));
3839 if (code == TRUTH_NOT_EXPR)
3840 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3842 return simple_operand_p (exp);
3846 /* The following functions are subroutines to fold_range_test and allow it to
3847 try to change a logical combination of comparisons into a range test.
3849 For example, both
3850 X == 2 || X == 3 || X == 4 || X == 5
3851 and
3852 X >= 2 && X <= 5
3853 are converted to
3854 (unsigned) (X - 2) <= 3
3856 We describe each set of comparisons as being either inside or outside
3857 a range, using a variable named like IN_P, and then describe the
3858 range with a lower and upper bound. If one of the bounds is omitted,
3859 it represents either the highest or lowest value of the type.
3861 In the comments below, we represent a range by two numbers in brackets
3862 preceded by a "+" to designate being inside that range, or a "-" to
3863 designate being outside that range, so the condition can be inverted by
3864 flipping the prefix. An omitted bound is represented by a "-". For
3865 example, "- [-, 10]" means being outside the range starting at the lowest
3866 possible value and ending at 10, in other words, being greater than 10.
3867 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3868 always false.
3870 We set up things so that the missing bounds are handled in a consistent
3871 manner so neither a missing bound nor "true" and "false" need to be
3872 handled using a special case. */
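/* Editor's illustration, not part of GCC: the range-test
   transformation described above, written out on plain C types.  The
   two functions below agree for every X away from INT_MIN (where the
   signed subtraction in the folded form would overflow); the folded
   form needs a single comparison because values below 2 wrap around
   to large unsigned numbers.  */

static int
range_test_unfolded (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test_folded (int x)
{
  return (unsigned int) (x - 2) <= 3;
}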
3874 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3875 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3876 and UPPER1_P are nonzero if the respective argument is an upper bound
3877 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3878 must be specified for a comparison. ARG1 will be converted to ARG0's
3879 type if both are specified. */
3881 static tree
3882 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3883 tree arg1, int upper1_p)
3885 tree tem;
3886 int result;
3887 int sgn0, sgn1;
3889 /* If neither arg represents infinity, do the normal operation.
3890 Else, if not a comparison, return infinity. Else handle the special
3891 comparison rules. Note that most of the cases below won't occur, but
3892 are handled for consistency. */
3894 if (arg0 != 0 && arg1 != 0)
3896 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3897 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3898 STRIP_NOPS (tem);
3899 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3902 if (TREE_CODE_CLASS (code) != tcc_comparison)
3903 return 0;
3905 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3906 for neither. In real mathematics, open-ended ranges need not compare
3907 equal. But this is computer arithmetic, where numbers are finite, so
3908 we can stand in for any unbounded bound with a value Z greater (for an
3909 upper bound) or smaller (for a lower bound) than any representable
3910 number. This permits us to treat unbounded ranges as equal.
3911 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3912 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3913 switch (code)
3915 case EQ_EXPR:
3916 result = sgn0 == sgn1;
3917 break;
3918 case NE_EXPR:
3919 result = sgn0 != sgn1;
3920 break;
3921 case LT_EXPR:
3922 result = sgn0 < sgn1;
3923 break;
3924 case LE_EXPR:
3925 result = sgn0 <= sgn1;
3926 break;
3927 case GT_EXPR:
3928 result = sgn0 > sgn1;
3929 break;
3930 case GE_EXPR:
3931 result = sgn0 >= sgn1;
3932 break;
3933 default:
3934 gcc_unreachable ();
3937 return constant_boolean_node (result, type);
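/* Editor's illustration, not part of GCC: the sign convention above,
   on plain ints with a null pointer standing in for an omitted bound.
   A missing lower bound compares below every finite value and a
   missing upper bound above every finite value, so for any V,
   cmp_bound (0, 0, &v, 1) < 0.  */

static int
cmp_bound (const int *a, int a_is_upper, const int *b, int b_is_upper)
{
  int sgn0 = a != 0 ? 0 : (a_is_upper ? 1 : -1);
  int sgn1 = b != 0 ? 0 : (b_is_upper ? 1 : -1);

  if (a != 0 && b != 0)		/* Both finite: compare the values.  */
    return (*a > *b) - (*a < *b);
  return (sgn0 > sgn1) - (sgn0 < sgn1);
}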
3940 /* Helper routine for make_range. Perform one step for it, return
3941 new expression if the loop should continue or NULL_TREE if it should
3942 stop. */
3944 tree
3945 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3946 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3947 bool *strict_overflow_p)
3949 tree arg0_type = TREE_TYPE (arg0);
3950 tree n_low, n_high, low = *p_low, high = *p_high;
3951 int in_p = *p_in_p, n_in_p;
3953 switch (code)
3955 case TRUTH_NOT_EXPR:
3956 /* We can only do something if the range is testing for zero. */
3957 if (low == NULL_TREE || high == NULL_TREE
3958 || ! integer_zerop (low) || ! integer_zerop (high))
3959 return NULL_TREE;
3960 *p_in_p = ! in_p;
3961 return arg0;
3963 case EQ_EXPR: case NE_EXPR:
3964 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3965 /* We can only do something if the range is testing for zero
3966 and if the second operand is an integer constant. Note that
3967 saying something is "in" the range we make is done by
3968 complementing IN_P since it will set in the initial case of
3969 being not equal to zero; "out" is leaving it alone. */
3970 if (low == NULL_TREE || high == NULL_TREE
3971 || ! integer_zerop (low) || ! integer_zerop (high)
3972 || TREE_CODE (arg1) != INTEGER_CST)
3973 return NULL_TREE;
3975 switch (code)
3977 case NE_EXPR: /* - [c, c] */
3978 low = high = arg1;
3979 break;
3980 case EQ_EXPR: /* + [c, c] */
3981 in_p = ! in_p, low = high = arg1;
3982 break;
3983 case GT_EXPR: /* - [-, c] */
3984 low = 0, high = arg1;
3985 break;
3986 case GE_EXPR: /* + [c, -] */
3987 in_p = ! in_p, low = arg1, high = 0;
3988 break;
3989 case LT_EXPR: /* - [c, -] */
3990 low = arg1, high = 0;
3991 break;
3992 case LE_EXPR: /* + [-, c] */
3993 in_p = ! in_p, low = 0, high = arg1;
3994 break;
3995 default:
3996 gcc_unreachable ();
3999 /* If this is an unsigned comparison, we also know that EXP is
4000 greater than or equal to zero. We base the range tests we make
4001 on that fact, so we record it here so we can parse existing
4002 range tests. We test arg0_type since often the return type
4003 of, e.g. EQ_EXPR, is boolean. */
4004 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4006 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4007 in_p, low, high, 1,
4008 build_int_cst (arg0_type, 0),
4009 NULL_TREE))
4010 return NULL_TREE;
4012 in_p = n_in_p, low = n_low, high = n_high;
4014 /* If the high bound is missing, but we have a nonzero low
4015 bound, reverse the range so it goes from zero to the low bound
4016 minus 1. */
4017 if (high == 0 && low && ! integer_zerop (low))
4019 in_p = ! in_p;
4020 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4021 integer_one_node, 0);
4022 low = build_int_cst (arg0_type, 0);
4026 *p_low = low;
4027 *p_high = high;
4028 *p_in_p = in_p;
4029 return arg0;
4031 case NEGATE_EXPR:
4032 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4033 low and high are non-NULL, then normalize will DTRT. */
4034 if (!TYPE_UNSIGNED (arg0_type)
4035 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4037 if (low == NULL_TREE)
4038 low = TYPE_MIN_VALUE (arg0_type);
4039 if (high == NULL_TREE)
4040 high = TYPE_MAX_VALUE (arg0_type);
4043 /* (-x) IN [a,b] -> x in [-b, -a] */
4044 n_low = range_binop (MINUS_EXPR, exp_type,
4045 build_int_cst (exp_type, 0),
4046 0, high, 1);
4047 n_high = range_binop (MINUS_EXPR, exp_type,
4048 build_int_cst (exp_type, 0),
4049 0, low, 0);
4050 if (n_high != 0 && TREE_OVERFLOW (n_high))
4051 return NULL_TREE;
4052 goto normalize;
4054 case BIT_NOT_EXPR:
4055 /* ~ X -> -X - 1 */
4056 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4057 build_int_cst (exp_type, 1));
4059 case PLUS_EXPR:
4060 case MINUS_EXPR:
4061 if (TREE_CODE (arg1) != INTEGER_CST)
4062 return NULL_TREE;
4064 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4065 move a constant to the other side. */
4066 if (!TYPE_UNSIGNED (arg0_type)
4067 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4068 return NULL_TREE;
4070 /* If EXP is signed, any overflow in the computation is undefined,
4071 so we don't worry about it so long as our computations on
4072 the bounds don't overflow. For unsigned, overflow is defined
4073 and this is exactly the right thing. */
4074 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4075 arg0_type, low, 0, arg1, 0);
4076 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4077 arg0_type, high, 1, arg1, 0);
4078 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4079 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4080 return NULL_TREE;
4082 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4083 *strict_overflow_p = true;
4085 normalize:
4086 /* Check for an unsigned range which has wrapped around the maximum
4087 value thus making n_high < n_low, and normalize it. */
4088 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4090 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4091 integer_one_node, 0);
4092 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4093 integer_one_node, 0);
4095 /* If the range is of the form +/- [ x+1, x ], we won't
4096 be able to normalize it. But then, it represents the
4097 whole range or the empty set, so make it
4098 +/- [ -, - ]. */
4099 if (tree_int_cst_equal (n_low, low)
4100 && tree_int_cst_equal (n_high, high))
4101 low = high = 0;
4102 else
4103 in_p = ! in_p;
4105 else
4106 low = n_low, high = n_high;
4108 *p_low = low;
4109 *p_high = high;
4110 *p_in_p = in_p;
4111 return arg0;
4113 CASE_CONVERT:
4114 case NON_LVALUE_EXPR:
4115 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4116 return NULL_TREE;
4118 if (! INTEGRAL_TYPE_P (arg0_type)
4119 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4120 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4121 return NULL_TREE;
4123 n_low = low, n_high = high;
4125 if (n_low != 0)
4126 n_low = fold_convert_loc (loc, arg0_type, n_low);
4128 if (n_high != 0)
4129 n_high = fold_convert_loc (loc, arg0_type, n_high);
4131 /* If we're converting ARG0, which has an unsigned type, to EXP's
4132 signed type, we will be doing the comparison as unsigned.
4133 The tests above have already verified that LOW and HIGH
4134 are both positive.
4136 So we have to ensure that we will handle large unsigned
4137 values the same way that the current signed bounds treat
4138 negative values. */
4140 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4142 tree high_positive;
4143 tree equiv_type;
4144 /* For fixed-point modes, we need to pass the saturating flag
4145 as the 2nd parameter. */
4146 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4147 equiv_type
4148 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4149 TYPE_SATURATING (arg0_type));
4150 else
4151 equiv_type
4152 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4154 /* A range without an upper bound is, naturally, unbounded.
4155 Since convert would have cropped a very large value, use
4156 the max value for the destination type. */
4157 high_positive
4158 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4159 : TYPE_MAX_VALUE (arg0_type);
4161 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4162 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4163 fold_convert_loc (loc, arg0_type,
4164 high_positive),
4165 build_int_cst (arg0_type, 1));
4167 /* If the low bound is specified, "and" the range with the
4168 range for which the original unsigned value will be
4169 positive. */
4170 if (low != 0)
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4173 1, fold_convert_loc (loc, arg0_type,
4174 integer_zero_node),
4175 high_positive))
4176 return NULL_TREE;
4178 in_p = (n_in_p == in_p);
4180 else
4182 /* Otherwise, "or" the range with the range of the input
4183 that will be interpreted as negative. */
4184 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4185 1, fold_convert_loc (loc, arg0_type,
4186 integer_zero_node),
4187 high_positive))
4188 return NULL_TREE;
4190 in_p = (in_p != n_in_p);
4194 *p_low = n_low;
4195 *p_high = n_high;
4196 *p_in_p = in_p;
4197 return arg0;
4199 default:
4200 return NULL_TREE;
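/* Editor's illustration, not part of GCC: the PLUS_EXPR/MINUS_EXPR
   step above moves the constant onto the bounds.  On unsigned
   arithmetic, and assuming LOW <= HIGH on entry, testing
   LOW <= X + C <= HIGH is the same as testing X against
   [LOW - C, HIGH - C]; when the subtraction wraps so that the new
   high bound falls below the new low bound, the set splits in two,
   which the "normalize:" label above expresses by inverting IN_P
   over the complementary interval.  */

static int
in_range_of_sum (unsigned int x, unsigned int c,
		 unsigned int low, unsigned int high)
{
  return x + c >= low && x + c <= high;
}

static int
in_shifted_range (unsigned int x, unsigned int c,
		  unsigned int low, unsigned int high)
{
  unsigned int n_low = low - c, n_high = high - c;

  if (n_high < n_low)		/* Wrapped: the set is two pieces.  */
    return x >= n_low || x <= n_high;
  return x >= n_low && x <= n_high;
}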
4204 /* Given EXP, a logical expression, set the range it is testing into
4205 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4206 actually being tested. *PLOW and *PHIGH will be made of the same
4207 type as the returned expression. If EXP is not a comparison, we
4208 will most likely not be returning a useful value and range. Set
4209 *STRICT_OVERFLOW_P to true if the return value is only valid
4210 because signed overflow is undefined; otherwise, do not change
4211 *STRICT_OVERFLOW_P. */
4213 tree
4214 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4215 bool *strict_overflow_p)
4217 enum tree_code code;
4218 tree arg0, arg1 = NULL_TREE;
4219 tree exp_type, nexp;
4220 int in_p;
4221 tree low, high;
4222 location_t loc = EXPR_LOCATION (exp);
4224 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4225 and see if we can refine the range. Some of the cases below may not
4226 happen, but it doesn't seem worth worrying about this. Each call to
4227 make_range_step below either refines the range and hands back the next
4228 subexpression to examine, or returns NULL_TREE, which ends the loop.
4230 in_p = 0;
4231 low = high = build_int_cst (TREE_TYPE (exp), 0);
4233 while (1)
4235 code = TREE_CODE (exp);
4236 exp_type = TREE_TYPE (exp);
4237 arg0 = NULL_TREE;
4239 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4241 if (TREE_OPERAND_LENGTH (exp) > 0)
4242 arg0 = TREE_OPERAND (exp, 0);
4243 if (TREE_CODE_CLASS (code) == tcc_binary
4244 || TREE_CODE_CLASS (code) == tcc_comparison
4245 || (TREE_CODE_CLASS (code) == tcc_expression
4246 && TREE_OPERAND_LENGTH (exp) > 1))
4247 arg1 = TREE_OPERAND (exp, 1);
4249 if (arg0 == NULL_TREE)
4250 break;
4252 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4253 &high, &in_p, strict_overflow_p);
4254 if (nexp == NULL_TREE)
4255 break;
4256 exp = nexp;
4259 /* If EXP is a constant, we can evaluate whether this is true or false. */
4260 if (TREE_CODE (exp) == INTEGER_CST)
4262 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4263 exp, 0, low, 0))
4264 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4265 exp, 1, high, 1)));
4266 low = high = 0;
4267 exp = 0;
4270 *pin_p = in_p, *plow = low, *phigh = high;
4271 return exp;
4274 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4275 type, TYPE, return an expression to test if EXP is in (or out of, depending
4276 on IN_P) the range. Return 0 if the test couldn't be created. */
4278 tree
4279 build_range_check (location_t loc, tree type, tree exp, int in_p,
4280 tree low, tree high)
4282 tree etype = TREE_TYPE (exp), value;
4284 #ifdef HAVE_canonicalize_funcptr_for_compare
4285 /* Disable this optimization for function pointer expressions
4286 on targets that require function pointer canonicalization. */
4287 if (HAVE_canonicalize_funcptr_for_compare
4288 && TREE_CODE (etype) == POINTER_TYPE
4289 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4290 return NULL_TREE;
4291 #endif
4293 if (! in_p)
4295 value = build_range_check (loc, type, exp, 1, low, high);
4296 if (value != 0)
4297 return invert_truthvalue_loc (loc, value);
4299 return 0;
4302 if (low == 0 && high == 0)
4303 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4305 if (low == 0)
4306 return fold_build2_loc (loc, LE_EXPR, type, exp,
4307 fold_convert_loc (loc, etype, high));
4309 if (high == 0)
4310 return fold_build2_loc (loc, GE_EXPR, type, exp,
4311 fold_convert_loc (loc, etype, low));
4313 if (operand_equal_p (low, high, 0))
4314 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4315 fold_convert_loc (loc, etype, low));
4317 if (integer_zerop (low))
4319 if (! TYPE_UNSIGNED (etype))
4321 etype = unsigned_type_for (etype);
4322 high = fold_convert_loc (loc, etype, high);
4323 exp = fold_convert_loc (loc, etype, exp);
4325 return build_range_check (loc, type, exp, 1, 0, high);
4328 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4329 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4331 unsigned HOST_WIDE_INT lo;
4332 HOST_WIDE_INT hi;
4333 int prec;
4335 prec = TYPE_PRECISION (etype);
4336 if (prec <= HOST_BITS_PER_WIDE_INT)
4338 hi = 0;
4339 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4341 else
4343 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4344 lo = HOST_WIDE_INT_M1U;
4347 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4349 if (TYPE_UNSIGNED (etype))
4351 tree signed_etype = signed_type_for (etype);
4352 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4353 etype
4354 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4355 else
4356 etype = signed_etype;
4357 exp = fold_convert_loc (loc, etype, exp);
4359 return fold_build2_loc (loc, GT_EXPR, type, exp,
4360 build_int_cst (etype, 0));
4364 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4365 This requires wrap-around arithmetic for the type of the expression.
4366 First make sure that arithmetic in this type is valid, then make sure
4367 that it wraps around. */
4368 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4369 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4370 TYPE_UNSIGNED (etype));
4372 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4374 tree utype, minv, maxv;
4376 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4377 for the type in question, as we rely on this here. */
4378 utype = unsigned_type_for (etype);
4379 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4380 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4381 integer_one_node, 1);
4382 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4384 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4385 minv, 1, maxv, 1)))
4386 etype = utype;
4387 else
4388 return 0;
4391 high = fold_convert_loc (loc, etype, high);
4392 low = fold_convert_loc (loc, etype, low);
4393 exp = fold_convert_loc (loc, etype, exp);
4395 value = const_binop (MINUS_EXPR, high, low);
4398 if (POINTER_TYPE_P (etype))
4400 if (value != 0 && !TREE_OVERFLOW (value))
4402 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4403 return build_range_check (loc, type,
4404 fold_build_pointer_plus_loc (loc, exp, low),
4405 1, build_int_cst (etype, 0), value);
4407 return 0;
4410 if (value != 0 && !TREE_OVERFLOW (value))
4411 return build_range_check (loc, type,
4412 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4413 1, build_int_cst (etype, 0), value);
4415 return 0;
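/* Editor's illustration, not part of GCC: the (c >= 1 && c <= 127)
   into (signed char) c > 0 special case handled above, on plain C
   types.  For an 8-bit unsigned value, lying in [1, 127] means being
   nonzero with the top bit clear, i.e. being positive when the same
   bits are read as signed.  Assumes the usual two's complement
   behaviour of the unsigned-to-signed conversion.  */

static int
in_1_to_127_unfolded (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
in_1_to_127_folded (unsigned char c)
{
  return (signed char) c > 0;
}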
4418 /* Return the predecessor of VAL in its type, handling the infinite case. */
4420 static tree
4421 range_predecessor (tree val)
4423 tree type = TREE_TYPE (val);
4425 if (INTEGRAL_TYPE_P (type)
4426 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4427 return 0;
4428 else
4429 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4432 /* Return the successor of VAL in its type, handling the infinite case. */
4434 static tree
4435 range_successor (tree val)
4437 tree type = TREE_TYPE (val);
4439 if (INTEGRAL_TYPE_P (type)
4440 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4441 return 0;
4442 else
4443 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4446 /* Given two ranges, see if we can merge them into one. Return 1 if we
4447 can, 0 if we can't. Set the output range into the specified parameters. */
4449 bool
4450 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4451 tree high0, int in1_p, tree low1, tree high1)
4453 int no_overlap;
4454 int subset;
4455 int temp;
4456 tree tem;
4457 int in_p;
4458 tree low, high;
4459 int lowequal = ((low0 == 0 && low1 == 0)
4460 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4461 low0, 0, low1, 0)));
4462 int highequal = ((high0 == 0 && high1 == 0)
4463 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4464 high0, 1, high1, 1)));
4466 /* Make range 0 be the range that starts first, or ends last if they
4467 start at the same value. Swap them if it isn't. */
4468 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4469 low0, 0, low1, 0))
4470 || (lowequal
4471 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4472 high1, 1, high0, 1))))
4474 temp = in0_p, in0_p = in1_p, in1_p = temp;
4475 tem = low0, low0 = low1, low1 = tem;
4476 tem = high0, high0 = high1, high1 = tem;
4479 /* Now flag two cases, whether the ranges are disjoint or whether the
4480 second range is totally subsumed in the first. Note that the tests
4481 below are simplified by the ones above. */
4482 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4483 high0, 1, low1, 0));
4484 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4485 high1, 1, high0, 1));
4487 /* We now have four cases, depending on whether we are including or
4488 excluding the two ranges. */
4489 if (in0_p && in1_p)
4491 /* If they don't overlap, the result is false. If the second range
4492 is a subset it is the result. Otherwise, the range is from the start
4493 of the second to the end of the first. */
4494 if (no_overlap)
4495 in_p = 0, low = high = 0;
4496 else if (subset)
4497 in_p = 1, low = low1, high = high1;
4498 else
4499 in_p = 1, low = low1, high = high0;
4502 else if (in0_p && ! in1_p)
4504 /* If they don't overlap, the result is the first range. If they are
4505 equal, the result is false. If the second range is a subset of the
4506 first, and the ranges begin at the same place, we go from just after
4507 the end of the second range to the end of the first. If the second
4508 range is not a subset of the first, or if it is a subset and both
4509 ranges end at the same place, the range starts at the start of the
4510 first range and ends just before the second range.
4511 Otherwise, we can't describe this as a single range. */
4512 if (no_overlap)
4513 in_p = 1, low = low0, high = high0;
4514 else if (lowequal && highequal)
4515 in_p = 0, low = high = 0;
4516 else if (subset && lowequal)
4518 low = range_successor (high1);
4519 high = high0;
4520 in_p = 1;
4521 if (low == 0)
4523 /* We are in the weird situation where high0 > high1 but
4524 high1 has no successor. Punt. */
4525 return 0;
4528 else if (! subset || highequal)
4530 low = low0;
4531 high = range_predecessor (low1);
4532 in_p = 1;
4533 if (high == 0)
4535 /* low0 < low1 but low1 has no predecessor. Punt. */
4536 return 0;
4539 else
4540 return 0;
4543 else if (! in0_p && in1_p)
4545 /* If they don't overlap, the result is the second range. If the second
4546 is a subset of the first, the result is false. Otherwise,
4547 the range starts just after the first range and ends at the
4548 end of the second. */
4549 if (no_overlap)
4550 in_p = 1, low = low1, high = high1;
4551 else if (subset || highequal)
4552 in_p = 0, low = high = 0;
4553 else
4555 low = range_successor (high0);
4556 high = high1;
4557 in_p = 1;
4558 if (low == 0)
4560 /* high1 > high0 but high0 has no successor. Punt. */
4561 return 0;
4566 else
4568 /* The case where we are excluding both ranges. Here the complex case
4569 is if they don't overlap. In that case, the only time we have a
4570 range is if they are adjacent. If the second is a subset of the
4571 first, the result is the first. Otherwise, the range to exclude
4572 starts at the beginning of the first range and ends at the end of the
4573 second. */
4574 if (no_overlap)
4576 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4577 range_successor (high0),
4578 1, low1, 0)))
4579 in_p = 0, low = low0, high = high1;
4580 else
4582 /* Canonicalize - [min, x] into - [-, x]. */
4583 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4584 switch (TREE_CODE (TREE_TYPE (low0)))
4586 case ENUMERAL_TYPE:
4587 if (TYPE_PRECISION (TREE_TYPE (low0))
4588 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4589 break;
4590 /* FALLTHROUGH */
4591 case INTEGER_TYPE:
4592 if (tree_int_cst_equal (low0,
4593 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4594 low0 = 0;
4595 break;
4596 case POINTER_TYPE:
4597 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4598 && integer_zerop (low0))
4599 low0 = 0;
4600 break;
4601 default:
4602 break;
4605 /* Canonicalize - [x, max] into - [x, -]. */
4606 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4607 switch (TREE_CODE (TREE_TYPE (high1)))
4609 case ENUMERAL_TYPE:
4610 if (TYPE_PRECISION (TREE_TYPE (high1))
4611 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4612 break;
4613 /* FALLTHROUGH */
4614 case INTEGER_TYPE:
4615 if (tree_int_cst_equal (high1,
4616 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4617 high1 = 0;
4618 break;
4619 case POINTER_TYPE:
4620 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4621 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4622 high1, 1,
4623 integer_one_node, 1)))
4624 high1 = 0;
4625 break;
4626 default:
4627 break;
4630 /* The ranges might be also adjacent between the maximum and
4631 minimum values of the given type. For
4632 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4633 return + [x + 1, y - 1]. */
4634 if (low0 == 0 && high1 == 0)
4636 low = range_successor (high0);
4637 high = range_predecessor (low1);
4638 if (low == 0 || high == 0)
4639 return 0;
4641 in_p = 1;
4643 else
4644 return 0;
4647 else if (subset)
4648 in_p = 0, low = low0, high = high0;
4649 else
4650 in_p = 0, low = low0, high = high1;
4653 *pin_p = in_p, *plow = low, *phigh = high;
4654 return 1;
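/* Editor's illustration, not part of GCC: the IN0_P && IN1_P case
   above, on plain ints.  Intersecting two overlapping inclusive
   ranges keeps the larger low bound and the smaller high bound; the
   two functions below agree for every X.  */

static int
and_ranges_unmerged (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}

static int
and_ranges_merged (int x)
{
  return x >= 5 && x <= 10;
}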
4658 /* Subroutine of fold, looking inside expressions of the form
4659 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4660 of the COND_EXPR. This function is being used also to optimize
4661 A op B ? C : A, by reversing the comparison first.
4663 Return a folded expression whose code is not a COND_EXPR
4664 anymore, or NULL_TREE if no folding opportunity is found. */
4666 static tree
4667 fold_cond_expr_with_comparison (location_t loc, tree type,
4668 tree arg0, tree arg1, tree arg2)
4670 enum tree_code comp_code = TREE_CODE (arg0);
4671 tree arg00 = TREE_OPERAND (arg0, 0);
4672 tree arg01 = TREE_OPERAND (arg0, 1);
4673 tree arg1_type = TREE_TYPE (arg1);
4674 tree tem;
4676 STRIP_NOPS (arg1);
4677 STRIP_NOPS (arg2);
4679 /* If we have A op 0 ? A : -A, consider applying the following
4680 transformations:
4682 A == 0? A : -A same as -A
4683 A != 0? A : -A same as A
4684 A >= 0? A : -A same as abs (A)
4685 A > 0? A : -A same as abs (A)
4686 A <= 0? A : -A same as -abs (A)
4687 A < 0? A : -A same as -abs (A)
4689 None of these transformations work for modes with signed
4690 zeros. If A is +/-0, the first two transformations will
4691 change the sign of the result (from +0 to -0, or vice
4692 versa). The last four will fix the sign of the result,
4693 even though the original expressions could be positive or
4694 negative, depending on the sign of A.
4696 Note that all these transformations are correct if A is
4697 NaN, since the two alternatives (A and -A) are also NaNs. */
4698 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4699 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4700 ? real_zerop (arg01)
4701 : integer_zerop (arg01))
4702 && ((TREE_CODE (arg2) == NEGATE_EXPR
4703 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4704 /* In the case that A is of the form X-Y, '-A' (arg2) may
4705 have already been folded to Y-X, check for that. */
4706 || (TREE_CODE (arg1) == MINUS_EXPR
4707 && TREE_CODE (arg2) == MINUS_EXPR
4708 && operand_equal_p (TREE_OPERAND (arg1, 0),
4709 TREE_OPERAND (arg2, 1), 0)
4710 && operand_equal_p (TREE_OPERAND (arg1, 1),
4711 TREE_OPERAND (arg2, 0), 0))))
4712 switch (comp_code)
4714 case EQ_EXPR:
4715 case UNEQ_EXPR:
4716 tem = fold_convert_loc (loc, arg1_type, arg1);
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type,
4719 negate_expr (tem)));
4720 case NE_EXPR:
4721 case LTGT_EXPR:
4722 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4723 case UNGE_EXPR:
4724 case UNGT_EXPR:
4725 if (flag_trapping_math)
4726 break;
4727 /* Fall through. */
4728 case GE_EXPR:
4729 case GT_EXPR:
4730 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4731 arg1 = fold_convert_loc (loc, signed_type_for
4732 (TREE_TYPE (arg1)), arg1);
4733 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4734 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4735 case UNLE_EXPR:
4736 case UNLT_EXPR:
4737 if (flag_trapping_math)
4738 break;
4739 case LE_EXPR:
4740 case LT_EXPR:
4741 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4742 arg1 = fold_convert_loc (loc, signed_type_for
4743 (TREE_TYPE (arg1)), arg1);
4744 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4745 return negate_expr (fold_convert_loc (loc, type, tem));
4746 default:
4747 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4748 break;
4751 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4752 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4753 both transformations are correct when A is NaN: A != 0
4754 is then true, and A == 0 is false. */
4756 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4757 && integer_zerop (arg01) && integer_zerop (arg2))
4759 if (comp_code == NE_EXPR)
4760 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4761 else if (comp_code == EQ_EXPR)
4762 return build_zero_cst (type);
4765 /* Try some transformations of A op B ? A : B.
4767 A == B? A : B same as B
4768 A != B? A : B same as A
4769 A >= B? A : B same as max (A, B)
4770 A > B? A : B same as max (B, A)
4771 A <= B? A : B same as min (A, B)
4772 A < B? A : B same as min (B, A)
4774 As above, these transformations don't work in the presence
4775 of signed zeros. For example, if A and B are zeros of
4776 opposite sign, the first two transformations will change
4777 the sign of the result. In the last four, the original
4778 expressions give different results for (A=+0, B=-0) and
4779 (A=-0, B=+0), but the transformed expressions do not.
4781 The first two transformations are correct if either A or B
4782 is a NaN. In the first transformation, the condition will
4783 be false, and B will indeed be chosen. In the case of the
4784 second transformation, the condition A != B will be true,
4785 and A will be chosen.
4787 The conversions to max() and min() are not correct if B is
4788 a number and A is not. The conditions in the original
4789 expressions will be false, so all four give B. The min()
4790 and max() versions would give a NaN instead. */
4791 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4792 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4793 /* Avoid these transformations if the COND_EXPR may be used
4794 as an lvalue in the C++ front-end. PR c++/19199. */
4795 && (in_gimple_form
4796 || VECTOR_TYPE_P (type)
4797 || (strcmp (lang_hooks.name, "GNU C++") != 0
4798 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4799 || ! maybe_lvalue_p (arg1)
4800 || ! maybe_lvalue_p (arg2)))
4802 tree comp_op0 = arg00;
4803 tree comp_op1 = arg01;
4804 tree comp_type = TREE_TYPE (comp_op0);
4806 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4807 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4809 comp_type = type;
4810 comp_op0 = arg1;
4811 comp_op1 = arg2;
4814 switch (comp_code)
4816 case EQ_EXPR:
4817 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4818 case NE_EXPR:
4819 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4820 case LE_EXPR:
4821 case LT_EXPR:
4822 case UNLE_EXPR:
4823 case UNLT_EXPR:
4824 /* In C++ a ?: expression can be an lvalue, so put the
4825 operand which will be used if they are equal first
4826 so that we can convert this back to the
4827 corresponding COND_EXPR. */
4828 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4830 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4831 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4832 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4833 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4834 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4835 comp_op1, comp_op0);
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4839 break;
4840 case GE_EXPR:
4841 case GT_EXPR:
4842 case UNGE_EXPR:
4843 case UNGT_EXPR:
4844 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4846 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4847 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4848 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4849 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4850 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4851 comp_op1, comp_op0);
4852 return pedantic_non_lvalue_loc (loc,
4853 fold_convert_loc (loc, type, tem));
4855 break;
4856 case UNEQ_EXPR:
4857 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, arg2));
4860 break;
4861 case LTGT_EXPR:
4862 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4863 return pedantic_non_lvalue_loc (loc,
4864 fold_convert_loc (loc, type, arg1));
4865 break;
4866 default:
4867 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4868 break;
4872 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4873 we might still be able to simplify this. For example,
4874 if C1 is one less or one more than C2, this might have started
4875 out as a MIN or MAX and been transformed by this function.
4876 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4878 if (INTEGRAL_TYPE_P (type)
4879 && TREE_CODE (arg01) == INTEGER_CST
4880 && TREE_CODE (arg2) == INTEGER_CST)
4881 switch (comp_code)
4883 case EQ_EXPR:
4884 if (TREE_CODE (arg1) == INTEGER_CST)
4885 break;
4886 /* We can replace A with C1 in this case. */
4887 arg1 = fold_convert_loc (loc, type, arg01);
4888 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4890 case LT_EXPR:
4891 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4892 MIN_EXPR, to preserve the signedness of the comparison. */
4893 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (PLUS_EXPR, arg2,
4897 build_int_cst (type, 1)),
4898 OEP_ONLY_CONST))
4900 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4901 fold_convert_loc (loc, TREE_TYPE (arg00),
4902 arg2));
4903 return pedantic_non_lvalue_loc (loc,
4904 fold_convert_loc (loc, type, tem));
4906 break;
4908 case LE_EXPR:
4909 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4910 as above. */
4911 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4912 OEP_ONLY_CONST)
4913 && operand_equal_p (arg01,
4914 const_binop (MINUS_EXPR, arg2,
4915 build_int_cst (type, 1)),
4916 OEP_ONLY_CONST))
4918 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4919 fold_convert_loc (loc, TREE_TYPE (arg00),
4920 arg2));
4921 return pedantic_non_lvalue_loc (loc,
4922 fold_convert_loc (loc, type, tem));
4924 break;
4926 case GT_EXPR:
4927 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4928 MAX_EXPR, to preserve the signedness of the comparison. */
4929 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (MINUS_EXPR, arg2,
4933 build_int_cst (type, 1)),
4934 OEP_ONLY_CONST))
4936 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4937 fold_convert_loc (loc, TREE_TYPE (arg00),
4938 arg2));
4939 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4941 break;
4943 case GE_EXPR:
4944 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4945 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4946 OEP_ONLY_CONST)
4947 && operand_equal_p (arg01,
4948 const_binop (PLUS_EXPR, arg2,
4949 build_int_cst (type, 1)),
4950 OEP_ONLY_CONST))
4952 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4953 fold_convert_loc (loc, TREE_TYPE (arg00),
4954 arg2));
4955 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4957 break;
4958 case NE_EXPR:
4959 break;
4960 default:
4961 gcc_unreachable ();
4964 return NULL_TREE;
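/* Editor's illustration, not part of GCC: three of the A op B ? A : B
   shapes handled above, on plain ints, where signed zeros and NaNs do
   not exist and the rewrites are unconditionally safe (away from
   INT_MIN in the abs case).  */

static int
cond_as_abs (int a)		/* A >= 0 ? A : -A  ==>  abs (A) */
{
  return a >= 0 ? a : -a;
}

static int
cond_as_min (int a, int b)	/* A <= B ? A : B  ==>  min (A, B) */
{
  return a <= b ? a : b;
}

static int
cond_as_max (int a, int b)	/* A >= B ? A : B  ==>  max (A, B) */
{
  return a >= b ? a : b;
}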
4969 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4970 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4971 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4972 false) >= 2)
4973 #endif
4975 /* EXP is some logical combination of boolean tests. See if we can
4976 merge it into some range test. Return the new tree if so. */
4978 static tree
4979 fold_range_test (location_t loc, enum tree_code code, tree type,
4980 tree op0, tree op1)
4982 int or_op = (code == TRUTH_ORIF_EXPR
4983 || code == TRUTH_OR_EXPR);
4984 int in0_p, in1_p, in_p;
4985 tree low0, low1, low, high0, high1, high;
4986 bool strict_overflow_p = false;
4987 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4988 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4989 tree tem;
4990 const char * const warnmsg = G_("assuming signed overflow does not occur "
4991 "when simplifying range test");
4993 /* If this is an OR operation, invert both sides; we will invert
4994 again at the end. */
4995 if (or_op)
4996 in0_p = ! in0_p, in1_p = ! in1_p;
4998 /* If both expressions are the same, if we can merge the ranges, and we
4999 can build the range test, return it or it inverted. If one of the
5000 ranges is always true or always false, consider it to be the same
5001 expression as the other. */
5002 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5003 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5004 in1_p, low1, high1)
5005 && 0 != (tem = (build_range_check (loc, type,
5006 lhs != 0 ? lhs
5007 : rhs != 0 ? rhs : integer_zero_node,
5008 in_p, low, high))))
5010 if (strict_overflow_p)
5011 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5012 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5015 /* On machines where the branch cost is expensive, if this is a
5016 short-circuited branch and the underlying object on both sides
5017 is the same, make a non-short-circuit operation. */
5018 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5019 && lhs != 0 && rhs != 0
5020 && (code == TRUTH_ANDIF_EXPR
5021 || code == TRUTH_ORIF_EXPR)
5022 && operand_equal_p (lhs, rhs, 0))
5024 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5025 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5026 which cases we can't do this. */
5027 if (simple_operand_p (lhs))
5028 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5029 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5030 type, op0, op1);
5032 else if (!lang_hooks.decls.global_bindings_p ()
5033 && !CONTAINS_PLACEHOLDER_P (lhs))
5035 tree common = save_expr (lhs);
5037 if (0 != (lhs = build_range_check (loc, type, common,
5038 or_op ? ! in0_p : in0_p,
5039 low0, high0))
5040 && (0 != (rhs = build_range_check (loc, type, common,
5041 or_op ? ! in1_p : in1_p,
5042 low1, high1))))
5044 if (strict_overflow_p)
5045 fold_overflow_warning (warnmsg,
5046 WARN_STRICT_OVERFLOW_COMPARISON);
5047 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5048 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5049 type, lhs, rhs);
5054 return 0;
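/* Editor's illustration, not part of GCC: the non-short-circuit
   rewrite above.  When both tests are cheap and free of side effects,
   evaluating them unconditionally with a bitwise AND avoids a branch;
   the two functions below compute the same truth value.  */

static int
digit_test_short_circuit (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
digit_test_branchless (int ch)
{
  return (ch >= '0') & (ch <= '9');
}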
5057 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5058 bit value. Arrange things so the extra bits will be set to zero if and
5059 only if C is sign-extended to its full width. If MASK is nonzero,
5060 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5062 static tree
5063 unextend (tree c, int p, int unsignedp, tree mask)
5065 tree type = TREE_TYPE (c);
5066 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5067 tree temp;
5069 if (p == modesize || unsignedp)
5070 return c;
5072 /* We work by getting just the sign bit into the low-order bit, then
5073 into the high-order bit, then sign-extend. We then XOR that value
5074 with C. */
5075 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5076 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5078 /* We must use a signed type in order to get an arithmetic right shift.
5079 However, we must also avoid introducing accidental overflows, so that
5080 a subsequent call to integer_zerop will work. Hence we must
5081 do the type conversion here. At this point, the constant is either
5082 zero or one, and the conversion to a signed type can never overflow.
5083 We could get an overflow if this conversion is done anywhere else. */
5084 if (TYPE_UNSIGNED (type))
5085 temp = fold_convert (signed_type_for (type), temp);
5087 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5088 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5089 if (mask != 0)
5090 temp = const_binop (BIT_AND_EXPR, temp,
5091 fold_convert (TREE_TYPE (c), mask));
5092 /* If necessary, convert the type back to match the type of C. */
5093 if (TYPE_UNSIGNED (type))
5094 temp = fold_convert (type, temp);
5096 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
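/* Editor's illustration, not part of GCC: unextend above isolates the
   sign bit of a P-bit value with shifts.  The same general idea on
   plain 32-bit values is the classic XOR-and-subtract trick: with
   M = 1 << (P - 1), (V ^ M) - M sign-extends the low P bits of V.
   Assumes 1 <= p <= 31 and two's complement conversion to int.  */

static int
sign_extend_p_bits (unsigned int v, int p)
{
  unsigned int m = 1U << (p - 1);

  v &= (1U << p) - 1;		/* keep only the low P bits */
  return (int) ((v ^ m) - m);
}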
5099 /* For an expression that has the form
5100 (A && B) || ~B
5101 or
5102 (A || B) && ~B,
5103 we can drop one of the inner expressions and simplify to
5104 A || ~B
5105 or
5106 A && ~B
5107 LOC is the location of the resulting expression. OP is the inner
5108 logical operation; the left-hand side in the examples above, while CMPOP
5109 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5110 removing a condition that guards another, as in
5111 (A != NULL && A->...) || A == NULL
5112 which we must not transform. If RHS_ONLY is true, only eliminate the
5113 right-most operand of the inner logical operation. */
5115 static tree
5116 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5117 bool rhs_only)
5119 tree type = TREE_TYPE (cmpop);
5120 enum tree_code code = TREE_CODE (cmpop);
5121 enum tree_code truthop_code = TREE_CODE (op);
5122 tree lhs = TREE_OPERAND (op, 0);
5123 tree rhs = TREE_OPERAND (op, 1);
5124 tree orig_lhs = lhs, orig_rhs = rhs;
5125 enum tree_code rhs_code = TREE_CODE (rhs);
5126 enum tree_code lhs_code = TREE_CODE (lhs);
5127 enum tree_code inv_code;
5129 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5130 return NULL_TREE;
5132 if (TREE_CODE_CLASS (code) != tcc_comparison)
5133 return NULL_TREE;
5135 if (rhs_code == truthop_code)
5137 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5138 if (newrhs != NULL_TREE)
5140 rhs = newrhs;
5141 rhs_code = TREE_CODE (rhs);
5144 if (lhs_code == truthop_code && !rhs_only)
5146 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5147 if (newlhs != NULL_TREE)
5149 lhs = newlhs;
5150 lhs_code = TREE_CODE (lhs);
5154 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5155 if (inv_code == rhs_code
5156 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5157 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5158 return lhs;
5159 if (!rhs_only && inv_code == lhs_code
5160 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5161 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5162 return rhs;
5163 if (rhs != orig_rhs || lhs != orig_lhs)
5164 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5165 lhs, rhs);
5166 return NULL_TREE;
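/* Editor's illustration, not part of GCC: the simplification above on
   plain boolean ints.  (A && B) || !B may drop the inner B, since
   whenever B is false the right-hand !B already yields true; so the
   whole expression is equivalent to A || !B.  The guarded case cited
   in the comment must keep its inner test, because with A == NULL the
   dropped condition is what prevents the null dereference.  */

static int
drop_inner_arm (int a, int b)
{
  return a || !b;		/* == (a && b) || !b */
}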
5169 /* Find ways of folding logical expressions of LHS and RHS:
5170 Try to merge two comparisons to the same innermost item.
5171 Look for range tests like "ch >= '0' && ch <= '9'".
5172 Look for combinations of simple terms on machines with expensive branches
5173 and evaluate the RHS unconditionally.
5175 For example, if we have p->a == 2 && p->b == 4 and we can make an
5176 object large enough to span both A and B, we can do this with a comparison
5177 against the object ANDed with the a mask.
5179 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5180 operations to do this with one comparison.
5182 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5183 function and the one above.
5185 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5186 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5188 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5189 two operands.
5191 We return the simplified tree or 0 if no optimization is possible. */
5193 static tree
5194 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5195 tree lhs, tree rhs)
5197 /* If this is the "or" of two comparisons, we can do something if
5198 the comparisons are NE_EXPR. If this is the "and", we can do something
5199 if the comparisons are EQ_EXPR. I.e.,
5200 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5202 WANTED_CODE is the comparison code we require. For single bit fields,
5203 we can convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5204 comparison for one-bit fields. */
5206 enum tree_code wanted_code;
5207 enum tree_code lcode, rcode;
5208 tree ll_arg, lr_arg, rl_arg, rr_arg;
5209 tree ll_inner, lr_inner, rl_inner, rr_inner;
5210 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5211 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5212 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5213 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5214 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5215 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5216 enum machine_mode lnmode, rnmode;
5217 tree ll_mask, lr_mask, rl_mask, rr_mask;
5218 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5219 tree l_const, r_const;
5220 tree lntype, rntype, result;
5221 HOST_WIDE_INT first_bit, end_bit;
5222 int volatilep;
5224 /* Start by getting the comparison codes. Fail if anything is volatile.
5225 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5226 it were surrounded with a NE_EXPR. */
5228 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5229 return 0;
5231 lcode = TREE_CODE (lhs);
5232 rcode = TREE_CODE (rhs);
5234 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5236 lhs = build2 (NE_EXPR, truth_type, lhs,
5237 build_int_cst (TREE_TYPE (lhs), 0));
5238 lcode = NE_EXPR;
5241 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5243 rhs = build2 (NE_EXPR, truth_type, rhs,
5244 build_int_cst (TREE_TYPE (rhs), 0));
5245 rcode = NE_EXPR;
5248 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5249 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5250 return 0;
5252 ll_arg = TREE_OPERAND (lhs, 0);
5253 lr_arg = TREE_OPERAND (lhs, 1);
5254 rl_arg = TREE_OPERAND (rhs, 0);
5255 rr_arg = TREE_OPERAND (rhs, 1);
5257 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5258 if (simple_operand_p (ll_arg)
5259 && simple_operand_p (lr_arg))
5261 if (operand_equal_p (ll_arg, rl_arg, 0)
5262 && operand_equal_p (lr_arg, rr_arg, 0))
5264 result = combine_comparisons (loc, code, lcode, rcode,
5265 truth_type, ll_arg, lr_arg);
5266 if (result)
5267 return result;
5269 else if (operand_equal_p (ll_arg, rr_arg, 0)
5270 && operand_equal_p (lr_arg, rl_arg, 0))
5272 result = combine_comparisons (loc, code, lcode,
5273 swap_tree_comparison (rcode),
5274 truth_type, ll_arg, lr_arg);
5275 if (result)
5276 return result;
5280 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5281 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5283 /* If the RHS can be evaluated unconditionally and its operands are
5284 simple, it wins to evaluate the RHS unconditionally on machines
5285 with expensive branches. In this case, this isn't a comparison
5286 that can be merged. */
5288 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5289 false) >= 2
5290 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5291 && simple_operand_p (rl_arg)
5292 && simple_operand_p (rr_arg))
5294 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5295 if (code == TRUTH_OR_EXPR
5296 && lcode == NE_EXPR && integer_zerop (lr_arg)
5297 && rcode == NE_EXPR && integer_zerop (rr_arg)
5298 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5299 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5300 return build2_loc (loc, NE_EXPR, truth_type,
5301 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5302 ll_arg, rl_arg),
5303 build_int_cst (TREE_TYPE (ll_arg), 0));
5305 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5306 if (code == TRUTH_AND_EXPR
5307 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5308 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5309 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5310 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5311 return build2_loc (loc, EQ_EXPR, truth_type,
5312 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5313 ll_arg, rl_arg),
5314 build_int_cst (TREE_TYPE (ll_arg), 0));
5317 /* See if the comparisons can be merged. Then get all the parameters for
5318 each side. */
5320 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5321 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5322 return 0;
5324 volatilep = 0;
5325 ll_inner = decode_field_reference (loc, ll_arg,
5326 &ll_bitsize, &ll_bitpos, &ll_mode,
5327 &ll_unsignedp, &volatilep, &ll_mask,
5328 &ll_and_mask);
5329 lr_inner = decode_field_reference (loc, lr_arg,
5330 &lr_bitsize, &lr_bitpos, &lr_mode,
5331 &lr_unsignedp, &volatilep, &lr_mask,
5332 &lr_and_mask);
5333 rl_inner = decode_field_reference (loc, rl_arg,
5334 &rl_bitsize, &rl_bitpos, &rl_mode,
5335 &rl_unsignedp, &volatilep, &rl_mask,
5336 &rl_and_mask);
5337 rr_inner = decode_field_reference (loc, rr_arg,
5338 &rr_bitsize, &rr_bitpos, &rr_mode,
5339 &rr_unsignedp, &volatilep, &rr_mask,
5340 &rr_and_mask);
5342 /* The inner operation on the lhs of each comparison must be the
5343 same if we are to be able to do anything.
5344 Then see if we have constants. If not, the same must be true for
5345 the rhs's. */
5346 if (volatilep || ll_inner == 0 || rl_inner == 0
5347 || ! operand_equal_p (ll_inner, rl_inner, 0))
5348 return 0;
5350 if (TREE_CODE (lr_arg) == INTEGER_CST
5351 && TREE_CODE (rr_arg) == INTEGER_CST)
5352 l_const = lr_arg, r_const = rr_arg;
5353 else if (lr_inner == 0 || rr_inner == 0
5354 || ! operand_equal_p (lr_inner, rr_inner, 0))
5355 return 0;
5356 else
5357 l_const = r_const = 0;
5359 /* If either comparison code is not correct for our logical operation,
5360 fail. However, we can convert a one-bit comparison against zero into
5361 the opposite comparison against that bit being set in the field. */
5363 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5364 if (lcode != wanted_code)
5366 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5368 /* Make the left operand unsigned, since we are only interested
5369 in the value of one bit. Otherwise we are doing the wrong
5370 thing below. */
5371 ll_unsignedp = 1;
5372 l_const = ll_mask;
5374 else
5375 return 0;
5378 /* This is analogous to the code for l_const above. */
5379 if (rcode != wanted_code)
5381 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5383 rl_unsignedp = 1;
5384 r_const = rl_mask;
5386 else
5387 return 0;
5390 /* See if we can find a mode that contains both fields being compared on
5391 the left. If we can't, fail. Otherwise, update all constants and masks
5392 to be relative to a field of that size. */
5393 first_bit = MIN (ll_bitpos, rl_bitpos);
5394 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5395 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5396 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5397 volatilep);
5398 if (lnmode == VOIDmode)
5399 return 0;
5401 lnbitsize = GET_MODE_BITSIZE (lnmode);
5402 lnbitpos = first_bit & ~ (lnbitsize - 1);
5403 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5404 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5406 if (BYTES_BIG_ENDIAN)
5408 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5409 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5412 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5413 size_int (xll_bitpos));
5414 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5415 size_int (xrl_bitpos));
5417 if (l_const)
5419 l_const = fold_convert_loc (loc, lntype, l_const);
5420 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5421 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5422 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5423 fold_build1_loc (loc, BIT_NOT_EXPR,
5424 lntype, ll_mask))))
5426 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5428 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5431 if (r_const)
5433 r_const = fold_convert_loc (loc, lntype, r_const);
5434 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5435 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5436 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5437 fold_build1_loc (loc, BIT_NOT_EXPR,
5438 lntype, rl_mask))))
5440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5446 /* If the right sides are not constant, do the same for it. Also,
5447 disallow this optimization if a size or signedness mismatch occurs
5448 between the left and right sides. */
5449 if (l_const == 0)
5451 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5452 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5453 /* Make sure the two fields on the right
5454 correspond to the left without being swapped. */
5455 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5456 return 0;
5458 first_bit = MIN (lr_bitpos, rr_bitpos);
5459 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5460 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5461 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5462 volatilep);
5463 if (rnmode == VOIDmode)
5464 return 0;
5466 rnbitsize = GET_MODE_BITSIZE (rnmode);
5467 rnbitpos = first_bit & ~ (rnbitsize - 1);
5468 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5469 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5471 if (BYTES_BIG_ENDIAN)
5473 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5474 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5477 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5478 rntype, lr_mask),
5479 size_int (xlr_bitpos));
5480 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5481 rntype, rr_mask),
5482 size_int (xrr_bitpos));
5484 /* Make a mask that corresponds to both fields being compared.
5485 Do this for both items being compared. If the operands are the
5486 same size and the bits being compared are in the same position
5487 then we can do this by masking both and comparing the masked
5488 results. */
5489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5490 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5491 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5493 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5494 ll_unsignedp || rl_unsignedp);
5495 if (! all_ones_mask_p (ll_mask, lnbitsize))
5496 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5498 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5499 lr_unsignedp || rr_unsignedp);
5500 if (! all_ones_mask_p (lr_mask, rnbitsize))
5501 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5503 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5506 /* There is still another way we can do something: If both pairs of
5507 fields being compared are adjacent, we may be able to make a wider
5508 field containing them both.
5510 Note that we still must mask the lhs/rhs expressions. Furthermore,
5511 the mask must be shifted to account for the shift done by
5512 make_bit_field_ref. */
5513 if ((ll_bitsize + ll_bitpos == rl_bitpos
5514 && lr_bitsize + lr_bitpos == rr_bitpos)
5515 || (ll_bitpos == rl_bitpos + rl_bitsize
5516 && lr_bitpos == rr_bitpos + rr_bitsize))
5518 tree type;
5520 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5521 ll_bitsize + rl_bitsize,
5522 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5523 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5524 lr_bitsize + rr_bitsize,
5525 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5527 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5528 size_int (MIN (xll_bitpos, xrl_bitpos)));
5529 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5530 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5532 /* Convert to the smaller type before masking out unwanted bits. */
5533 type = lntype;
5534 if (lntype != rntype)
5536 if (lnbitsize > rnbitsize)
5538 lhs = fold_convert_loc (loc, rntype, lhs);
5539 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5540 type = rntype;
5542 else if (lnbitsize < rnbitsize)
5544 rhs = fold_convert_loc (loc, lntype, rhs);
5545 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5546 type = lntype;
5550 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5551 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5553 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5554 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5556 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5559 return 0;
5562 /* Handle the case of comparisons with constants. If there is something in
5563 common between the masks, those bits of the constants must be the same.
5564 If not, the condition is always false. Test for this to avoid generating
5565 incorrect code below. */
5566 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5567 if (! integer_zerop (result)
5568 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5569 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5571 if (wanted_code == NE_EXPR)
5573 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5574 return constant_boolean_node (true, truth_type);
5576 else
5578 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5579 return constant_boolean_node (false, truth_type);
5583 /* Construct the expression we will return. First get the component
5584 reference we will make. Unless the mask is all ones across the width of
5585 that field, perform the mask operation. Then compare with the
5586 merged constant. */
5587 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5588 ll_unsignedp || rl_unsignedp);
5590 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5591 if (! all_ones_mask_p (ll_mask, lnbitsize))
5592 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5594 return build2_loc (loc, wanted_code, truth_type, result,
5595 const_binop (BIT_IOR_EXPR, l_const, r_const));
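/* As a concrete illustration of the constant-merging case above (a
   sketch; the exact mask values depend on the target's bit-field
   layout and endianness): given

     struct s { unsigned a : 1; unsigned b : 1; } x;

   the test "x.a == 1 && x.b == 1" loads the containing word once and
   becomes, in effect,

     (word & 3) == 3

   with the two masks IORed together and the two constants merged.  */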
5598 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5599 constant. */
5601 static tree
5602 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5603 tree op0, tree op1)
5605 tree arg0 = op0;
5606 enum tree_code op_code;
5607 tree comp_const;
5608 tree minmax_const;
5609 int consts_equal, consts_lt;
5610 tree inner;
5612 STRIP_SIGN_NOPS (arg0);
5614 op_code = TREE_CODE (arg0);
5615 minmax_const = TREE_OPERAND (arg0, 1);
5616 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5617 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5618 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5619 inner = TREE_OPERAND (arg0, 0);
5621 /* If something does not permit us to optimize, return NULL_TREE. */
5622 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5623 || TREE_CODE (comp_const) != INTEGER_CST
5624 || TREE_OVERFLOW (comp_const)
5625 || TREE_CODE (minmax_const) != INTEGER_CST
5626 || TREE_OVERFLOW (minmax_const))
5627 return NULL_TREE;
5629 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5630 and GT_EXPR, doing the rest with recursive calls using logical
5631 simplifications. */
5632 switch (code)
5634 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5636 tree tem
5637 = optimize_minmax_comparison (loc,
5638 invert_tree_comparison (code, false),
5639 type, op0, op1);
5640 if (tem)
5641 return invert_truthvalue_loc (loc, tem);
5642 return NULL_TREE;
5645 case GE_EXPR:
5646 return
5647 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5648 optimize_minmax_comparison
5649 (loc, EQ_EXPR, type, arg0, comp_const),
5650 optimize_minmax_comparison
5651 (loc, GT_EXPR, type, arg0, comp_const));
5653 case EQ_EXPR:
5654 if (op_code == MAX_EXPR && consts_equal)
5655 /* MAX (X, 0) == 0 -> X <= 0 */
5656 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5658 else if (op_code == MAX_EXPR && consts_lt)
5659 /* MAX (X, 0) == 5 -> X == 5 */
5660 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5662 else if (op_code == MAX_EXPR)
5663 /* MAX (X, 0) == -1 -> false */
5664 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5666 else if (consts_equal)
5667 /* MIN (X, 0) == 0 -> X >= 0 */
5668 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5670 else if (consts_lt)
5671 /* MIN (X, 0) == 5 -> false */
5672 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5674 else
5675 /* MIN (X, 0) == -1 -> X == -1 */
5676 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5678 case GT_EXPR:
5679 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5680 /* MAX (X, 0) > 0 -> X > 0
5681 MAX (X, 0) > 5 -> X > 5 */
5682 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5684 else if (op_code == MAX_EXPR)
5685 /* MAX (X, 0) > -1 -> true */
5686 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5688 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5689 /* MIN (X, 0) > 0 -> false
5690 MIN (X, 0) > 5 -> false */
5691 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5693 else
5694 /* MIN (X, 0) > -1 -> X > -1 */
5695 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5697 default:
5698 return NULL_TREE;
5702 /* T is an integer expression that is being multiplied, divided, or taken a
5703 modulus (CODE says which and what kind of divide or modulus) by a
5704 constant C. See if we can eliminate that operation by folding it with
5705 other operations already in T. WIDE_TYPE, if non-null, is a type that
5706 should be used for the computation if wider than our type.
5708 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5709 (X * 2) + (Y * 4). We must, however, be assured that either the original
5710 expression would not overflow or that overflow is undefined for the type
5711 in the language in question.
5713 If we return a non-null expression, it is an equivalent form of the
5714 original computation, but need not be in the original type.
5716 We set *STRICT_OVERFLOW_P to true if the return value depends
5717 signed overflow being undefined. Otherwise we do not change
5718 *STRICT_OVERFLOW_P. */
5720 static tree
5721 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5722 bool *strict_overflow_p)
5724 /* To avoid exponential search depth, refuse to allow recursion past
5725 three levels. Beyond that (1) it's highly unlikely that we'll find
5726 something interesting and (2) we've probably processed it before
5727 when we built the inner expression. */
5729 static int depth;
5730 tree ret;
5732 if (depth > 3)
5733 return NULL;
5735 depth++;
5736 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5737 depth--;
5739 return ret;
5742 static tree
5743 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5744 bool *strict_overflow_p)
5746 tree type = TREE_TYPE (t);
5747 enum tree_code tcode = TREE_CODE (t);
5748 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5749 > GET_MODE_SIZE (TYPE_MODE (type)))
5750 ? wide_type : type);
5751 tree t1, t2;
5752 int same_p = tcode == code;
5753 tree op0 = NULL_TREE, op1 = NULL_TREE;
5754 bool sub_strict_overflow_p;
5756 /* Don't deal with constants of zero here; they confuse the code below. */
5757 if (integer_zerop (c))
5758 return NULL_TREE;
5760 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5761 op0 = TREE_OPERAND (t, 0);
5763 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5764 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5766 /* Note that we need not handle conditional operations here since fold
5767 already handles those cases. So just do arithmetic here. */
5768 switch (tcode)
5770 case INTEGER_CST:
5771 /* For a constant, we can always simplify if we are a multiply
5772 or (for divide and modulus) if it is a multiple of our constant. */
5773 if (code == MULT_EXPR
5774 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5775 return const_binop (code, fold_convert (ctype, t),
5776 fold_convert (ctype, c));
5777 break;
5779 CASE_CONVERT: case NON_LVALUE_EXPR:
5780 /* If op0 is an expression ... */
5781 if ((COMPARISON_CLASS_P (op0)
5782 || UNARY_CLASS_P (op0)
5783 || BINARY_CLASS_P (op0)
5784 || VL_EXP_CLASS_P (op0)
5785 || EXPRESSION_CLASS_P (op0))
5786 /* ... and has wrapping overflow, and its type is smaller
5787 than ctype, then we cannot pass through as widening. */
5788 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5789 && (TYPE_PRECISION (ctype)
5790 > TYPE_PRECISION (TREE_TYPE (op0))))
5791 /* ... or this is a truncation (t is narrower than op0),
5792 then we cannot pass through this narrowing. */
5793 || (TYPE_PRECISION (type)
5794 < TYPE_PRECISION (TREE_TYPE (op0)))
5795 /* ... or signedness changes for division or modulus,
5796 then we cannot pass through this conversion. */
5797 || (code != MULT_EXPR
5798 && (TYPE_UNSIGNED (ctype)
5799 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5800 /* ... or OP0's type has undefined overflow while the type we
5801 convert to has not, we cannot do the operation in the inner type
5802 as that would introduce undefined overflow. */
5803 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5804 && !TYPE_OVERFLOW_UNDEFINED (type))))
5805 break;
5807 /* Pass the constant down and see if we can make a simplification. If
5808 we can, replace this expression with the inner simplification for
5809 possible later conversion to our or some other type. */
5810 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5811 && TREE_CODE (t2) == INTEGER_CST
5812 && !TREE_OVERFLOW (t2)
5813 && (0 != (t1 = extract_muldiv (op0, t2, code,
5814 code == MULT_EXPR
5815 ? ctype : NULL_TREE,
5816 strict_overflow_p))))
5817 return t1;
5818 break;
5820 case ABS_EXPR:
5821 /* If widening the type changes it from signed to unsigned, then we
5822 must avoid building ABS_EXPR itself as unsigned. */
5823 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5825 tree cstype = (*signed_type_for) (ctype);
5826 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5827 != 0)
5829 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5830 return fold_convert (ctype, t1);
5832 break;
5834 /* If the constant is negative, we cannot simplify this. */
5835 if (tree_int_cst_sgn (c) == -1)
5836 break;
5837 /* FALLTHROUGH */
5838 case NEGATE_EXPR:
5839 /* For division and modulus, type can't be unsigned, as e.g.
5840 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5841 For signed types, even with wrapping overflow, this is fine. */
5842 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5843 break;
5844 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5845 != 0)
5846 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5847 break;
5849 case MIN_EXPR: case MAX_EXPR:
5850 /* If widening the type changes the signedness, then we can't perform
5851 this optimization as that changes the result. */
5852 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5853 break;
5855 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5856 sub_strict_overflow_p = false;
5857 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5858 &sub_strict_overflow_p)) != 0
5859 && (t2 = extract_muldiv (op1, c, code, wide_type,
5860 &sub_strict_overflow_p)) != 0)
5862 if (tree_int_cst_sgn (c) < 0)
5863 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5864 if (sub_strict_overflow_p)
5865 *strict_overflow_p = true;
5866 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5867 fold_convert (ctype, t2));
5869 break;
5871 case LSHIFT_EXPR: case RSHIFT_EXPR:
5872 /* If the second operand is constant, this is a multiplication
5873 or floor division, by a power of two, so we can treat it that
5874 way unless the multiplier or divisor overflows. Signed
5875 left-shift overflow is implementation-defined rather than
5876 undefined in C90, so do not convert signed left shift into
5877 multiplication. */
5878 if (TREE_CODE (op1) == INTEGER_CST
5879 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5880 /* const_binop may not detect overflow correctly,
5881 so check for it explicitly here. */
5882 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5883 && TREE_INT_CST_HIGH (op1) == 0
5884 && 0 != (t1 = fold_convert (ctype,
5885 const_binop (LSHIFT_EXPR,
5886 size_one_node,
5887 op1)))
5888 && !TREE_OVERFLOW (t1))
5889 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5890 ? MULT_EXPR : FLOOR_DIV_EXPR,
5891 ctype,
5892 fold_convert (ctype, op0),
5893 t1),
5894 c, code, wide_type, strict_overflow_p);
5895 break;
5897 case PLUS_EXPR: case MINUS_EXPR:
5898 /* See if we can eliminate the operation on both sides. If we can, we
5899 can return a new PLUS or MINUS. If we can't, the only remaining
5900 cases where we can do anything are if the second operand is a
5901 constant. */
5902 sub_strict_overflow_p = false;
5903 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5904 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5905 if (t1 != 0 && t2 != 0
5906 && (code == MULT_EXPR
5907 /* If not multiplication, we can only do this if both operands
5908 are divisible by c. */
5909 || (multiple_of_p (ctype, op0, c)
5910 && multiple_of_p (ctype, op1, c))))
5912 if (sub_strict_overflow_p)
5913 *strict_overflow_p = true;
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5915 fold_convert (ctype, t2));
5918 /* If this was a subtraction, negate OP1 and set it to be an addition.
5919 This simplifies the logic below. */
5920 if (tcode == MINUS_EXPR)
5922 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5923 /* If OP1 was not easily negatable, the constant may be OP0. */
5924 if (TREE_CODE (op0) == INTEGER_CST)
5926 tree tem = op0;
5927 op0 = op1;
5928 op1 = tem;
5929 tem = t1;
5930 t1 = t2;
5931 t2 = tem;
5935 if (TREE_CODE (op1) != INTEGER_CST)
5936 break;
5938 /* If either OP1 or C are negative, this optimization is not safe for
5939 some of the division and remainder types, while for others we need
5940 to change the code. */
5941 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5943 if (code == CEIL_DIV_EXPR)
5944 code = FLOOR_DIV_EXPR;
5945 else if (code == FLOOR_DIV_EXPR)
5946 code = CEIL_DIV_EXPR;
5947 else if (code != MULT_EXPR
5948 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5949 break;
5952 /* If it's a multiply or a division/modulus operation of a multiple
5953 of our constant, do the operation and verify it doesn't overflow. */
5954 if (code == MULT_EXPR
5955 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5957 op1 = const_binop (code, fold_convert (ctype, op1),
5958 fold_convert (ctype, c));
5959 /* We allow the constant to overflow with wrapping semantics. */
5960 if (op1 == 0
5961 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5962 break;
5964 else
5965 break;
5967 /* If we have an unsigned type, we cannot widen the operation since it
5968 will change the result if the original computation overflowed. */
5969 if (TYPE_UNSIGNED (ctype) && ctype != type)
5970 break;
5972 /* If we were able to eliminate our operation from the first side,
5973 apply our operation to the second side and reform the PLUS. */
5974 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5975 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5977 /* The last case is if we are a multiply. In that case, we can
5978 apply the distributive law to commute the multiply and addition
5979 if the multiplication of the constants doesn't overflow
5980 and overflow is defined. With undefined overflow
5981 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5982 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5983 return fold_build2 (tcode, ctype,
5984 fold_build2 (code, ctype,
5985 fold_convert (ctype, op0),
5986 fold_convert (ctype, c)),
5987 op1);
5989 break;
5991 case MULT_EXPR:
5992 /* We have a special case here if we are doing something like
5993 (C * 8) % 4 since we know that's zero. */
5994 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5995 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5996 /* If the multiplication can overflow we cannot optimize this. */
5997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5998 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5999 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6001 *strict_overflow_p = true;
6002 return omit_one_operand (type, integer_zero_node, op0);
6005 /* ... fall through ... */
6007 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6008 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6009 /* If we can extract our operation from the LHS, do so and return a
6010 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6011 do something only if the second operand is a constant. */
6012 if (same_p
6013 && (t1 = extract_muldiv (op0, c, code, wide_type,
6014 strict_overflow_p)) != 0)
6015 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6016 fold_convert (ctype, op1));
6017 else if (tcode == MULT_EXPR && code == MULT_EXPR
6018 && (t1 = extract_muldiv (op1, c, code, wide_type,
6019 strict_overflow_p)) != 0)
6020 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6021 fold_convert (ctype, t1));
6022 else if (TREE_CODE (op1) != INTEGER_CST)
6023 return 0;
6025 /* If these are the same operation types, we can associate them
6026 assuming no overflow. */
6027 if (tcode == code)
6029 double_int mul;
6030 bool overflow_p;
6031 unsigned prec = TYPE_PRECISION (ctype);
6032 bool uns = TYPE_UNSIGNED (ctype);
6033 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6034 double_int dic = tree_to_double_int (c).ext (prec, uns);
6035 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6036 overflow_p = ((!uns && overflow_p)
6037 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6038 if (!double_int_fits_to_tree_p (ctype, mul)
6039 && ((uns && tcode != MULT_EXPR) || !uns))
6040 overflow_p = 1;
6041 if (!overflow_p)
6042 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6043 double_int_to_tree (ctype, mul));
6046 /* If these operations "cancel" each other, we have the main
6047 optimizations of this pass, which occur when either constant is a
6048 multiple of the other, in which case we replace this with either an
6049 operation of CODE or TCODE.
6051 If we have an unsigned type, we cannot do this since it will change
6052 the result if the original computation overflowed. */
6053 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6054 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6055 || (tcode == MULT_EXPR
6056 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6057 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6058 && code != MULT_EXPR)))
6060 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6062 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6063 *strict_overflow_p = true;
6064 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6065 fold_convert (ctype,
6066 const_binop (TRUNC_DIV_EXPR,
6067 op1, c)));
6069 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6071 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6072 *strict_overflow_p = true;
6073 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6074 fold_convert (ctype,
6075 const_binop (TRUNC_DIV_EXPR,
6076 c, op1)));
6079 break;
6081 default:
6082 break;
6085 return 0;
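/* Two concrete folds this routine performs (assuming a signed type
   where overflow is undefined, so *STRICT_OVERFLOW_P may be set):

     (x * 8) / 4  ->  x * 2    (MULT_EXPR under a truncating division;
                                the constants cancel)
     (x * 6) * 4  ->  x * 24   (same codes associate, provided the
                                constant product does not overflow)  */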
6088 /* Return a node which has the indicated constant VALUE (either 0 or
6089 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6090 and is of the indicated TYPE. */
6092 tree
6093 constant_boolean_node (bool value, tree type)
6095 if (type == integer_type_node)
6096 return value ? integer_one_node : integer_zero_node;
6097 else if (type == boolean_type_node)
6098 return value ? boolean_true_node : boolean_false_node;
6099 else if (TREE_CODE (type) == VECTOR_TYPE)
6100 return build_vector_from_val (type,
6101 build_int_cst (TREE_TYPE (type),
6102 value ? -1 : 0));
6103 else
6104 return fold_convert (type, value ? integer_one_node : integer_zero_node);
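/* For example, constant_boolean_node (true, boolean_type_node) is just
   boolean_true_node, while for a vector truth type it builds the
   all-ones vector { -1, -1, ... } that vector comparisons produce.  */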
6108 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6109 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6110 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6111 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6112 COND is the first argument to CODE; otherwise (as in the example
6113 given here), it is the second argument. TYPE is the type of the
6114 original expression. Return NULL_TREE if no simplification is
6115 possible. */
6117 static tree
6118 fold_binary_op_with_conditional_arg (location_t loc,
6119 enum tree_code code,
6120 tree type, tree op0, tree op1,
6121 tree cond, tree arg, int cond_first_p)
6123 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6124 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6125 tree test, true_value, false_value;
6126 tree lhs = NULL_TREE;
6127 tree rhs = NULL_TREE;
6128 enum tree_code cond_code = COND_EXPR;
6130 if (TREE_CODE (cond) == COND_EXPR
6131 || TREE_CODE (cond) == VEC_COND_EXPR)
6133 test = TREE_OPERAND (cond, 0);
6134 true_value = TREE_OPERAND (cond, 1);
6135 false_value = TREE_OPERAND (cond, 2);
6136 /* If this operand throws an exception, then it does not make
6137 sense to try to perform a logical or arithmetic operation
6138 involving it. */
6139 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6140 lhs = true_value;
6141 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6142 rhs = false_value;
6144 else
6146 tree testtype = TREE_TYPE (cond);
6147 test = cond;
6148 true_value = constant_boolean_node (true, testtype);
6149 false_value = constant_boolean_node (false, testtype);
6152 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6153 cond_code = VEC_COND_EXPR;
6155 /* This transformation is only worthwhile if we don't have to wrap ARG
6156 in a SAVE_EXPR and the operation can be simplified without recursing
6157 on at least one of the branches once it's pushed inside the COND_EXPR. */
6158 if (!TREE_CONSTANT (arg)
6159 && (TREE_SIDE_EFFECTS (arg)
6160 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6161 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6162 return NULL_TREE;
6164 arg = fold_convert_loc (loc, arg_type, arg);
6165 if (lhs == 0)
6167 true_value = fold_convert_loc (loc, cond_type, true_value);
6168 if (cond_first_p)
6169 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6170 else
6171 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6173 if (rhs == 0)
6175 false_value = fold_convert_loc (loc, cond_type, false_value);
6176 if (cond_first_p)
6177 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6178 else
6179 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6182 /* Check that we have simplified at least one of the branches. */
6183 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6184 return NULL_TREE;
6186 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6190 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6192 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6193 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6194 ADDEND is the same as X.
6196 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6197 and finite. The problematic cases are when X is zero, and its mode
6198 has signed zeros. In the case of rounding towards -infinity,
6199 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6200 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6202 bool
6203 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6205 if (!real_zerop (addend))
6206 return false;
6208 /* Don't allow the fold with -fsignaling-nans. */
6209 if (HONOR_SNANS (TYPE_MODE (type)))
6210 return false;
6212 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6213 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6214 return true;
6216 /* In a vector or complex, we would need to check the sign of all zeros. */
6217 if (TREE_CODE (addend) != REAL_CST)
6218 return false;
6220 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6221 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6222 negate = !negate;
6224 /* The mode has signed zeros, and we have to honor their sign.
6225 In this situation, there is only one case we can return true for.
6226 X - 0 is the same as X unless rounding towards -infinity is
6227 supported. */
6228 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
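/* A minimal illustration of the signed-zero hazard guarded against
   here (assuming IEEE semantics and the default rounding mode):

     -0.0 + 0.0  ==  +0.0    so "x + 0.0" is not "x" when x is -0.0,
     -0.0 - 0.0  ==  -0.0    so "x - 0.0" is "x", unless the rounding
                             mode is towards -infinity, where 0.0 - 0.0
                             yields -0.0.  */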
6231 /* Subroutine of fold() that checks comparisons of built-in math
6232 functions against real constants.
6234 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6235 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6236 is the type of the result and ARG0 and ARG1 are the operands of the
6237 comparison. ARG1 must be a TREE_REAL_CST.
6239 The function returns the constant folded tree if a simplification
6240 can be made, and NULL_TREE otherwise. */
6242 static tree
6243 fold_mathfn_compare (location_t loc,
6244 enum built_in_function fcode, enum tree_code code,
6245 tree type, tree arg0, tree arg1)
6247 REAL_VALUE_TYPE c;
6249 if (BUILTIN_SQRT_P (fcode))
6251 tree arg = CALL_EXPR_ARG (arg0, 0);
6252 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6254 c = TREE_REAL_CST (arg1);
6255 if (REAL_VALUE_NEGATIVE (c))
6257 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6258 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6259 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6261 /* sqrt(x) > y is always true, if y is negative and we
6262 don't care about NaNs, i.e. negative values of x. */
6263 if (code == NE_EXPR || !HONOR_NANS (mode))
6264 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6266 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6267 return fold_build2_loc (loc, GE_EXPR, type, arg,
6268 build_real (TREE_TYPE (arg), dconst0));
6270 else if (code == GT_EXPR || code == GE_EXPR)
6272 REAL_VALUE_TYPE c2;
6274 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6275 real_convert (&c2, mode, &c2);
6277 if (REAL_VALUE_ISINF (c2))
6279 /* sqrt(x) > y is x == +Inf, when y is very large. */
6280 if (HONOR_INFINITIES (mode))
6281 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6282 build_real (TREE_TYPE (arg), c2));
6284 /* sqrt(x) > y is always false, when y is very large
6285 and we don't care about infinities. */
6286 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6289 /* sqrt(x) > c is the same as x > c*c. */
6290 return fold_build2_loc (loc, code, type, arg,
6291 build_real (TREE_TYPE (arg), c2));
6293 else if (code == LT_EXPR || code == LE_EXPR)
6295 REAL_VALUE_TYPE c2;
6297 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6298 real_convert (&c2, mode, &c2);
6300 if (REAL_VALUE_ISINF (c2))
6302 /* sqrt(x) < y is always true, when y is a very large
6303 value and we don't care about NaNs or Infinities. */
6304 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6305 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6307 /* sqrt(x) < y is x != +Inf when y is very large and we
6308 don't care about NaNs. */
6309 if (! HONOR_NANS (mode))
6310 return fold_build2_loc (loc, NE_EXPR, type, arg,
6311 build_real (TREE_TYPE (arg), c2));
6313 /* sqrt(x) < y is x >= 0 when y is very large and we
6314 don't care about Infinities. */
6315 if (! HONOR_INFINITIES (mode))
6316 return fold_build2_loc (loc, GE_EXPR, type, arg,
6317 build_real (TREE_TYPE (arg), dconst0));
6319 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6320 arg = save_expr (arg);
6321 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6322 fold_build2_loc (loc, GE_EXPR, type, arg,
6323 build_real (TREE_TYPE (arg),
6324 dconst0)),
6325 fold_build2_loc (loc, NE_EXPR, type, arg,
6326 build_real (TREE_TYPE (arg),
6327 c2)));
6330 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6331 if (! HONOR_NANS (mode))
6332 return fold_build2_loc (loc, code, type, arg,
6333 build_real (TREE_TYPE (arg), c2));
6335 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6336 arg = save_expr (arg);
6337 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6338 fold_build2_loc (loc, GE_EXPR, type, arg,
6339 build_real (TREE_TYPE (arg),
6340 dconst0)),
6341 fold_build2_loc (loc, code, type, arg,
6342 build_real (TREE_TYPE (arg),
6343 c2)));
6347 return NULL_TREE;
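/* For example, "sqrt (x) > 3.0" folds via the GT_EXPR branch to
   "x > 9.0" (valid even with NaNs, since both sides are then false),
   while "sqrt (x) < 3.0" becomes "x < 9.0" only when NaNs can be
   ignored, and otherwise keeps the domain check as
   "x >= 0.0 && x < 9.0".  */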
6350 /* Subroutine of fold() that optimizes comparisons against Infinities,
6351 either +Inf or -Inf.
6353 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6354 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6355 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6357 The function returns the constant folded tree if a simplification
6358 can be made, and NULL_TREE otherwise. */
6360 static tree
6361 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6362 tree arg0, tree arg1)
6364 enum machine_mode mode;
6365 REAL_VALUE_TYPE max;
6366 tree temp;
6367 bool neg;
6369 mode = TYPE_MODE (TREE_TYPE (arg0));
6371 /* For negative infinity swap the sense of the comparison. */
6372 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6373 if (neg)
6374 code = swap_tree_comparison (code);
6376 switch (code)
6378 case GT_EXPR:
6379 /* x > +Inf is always false, if we ignore sNaNs. */
6380 if (HONOR_SNANS (mode))
6381 return NULL_TREE;
6382 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6384 case LE_EXPR:
6385 /* x <= +Inf is always true, if we don't care about NaNs. */
6386 if (! HONOR_NANS (mode))
6387 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6389 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6390 arg0 = save_expr (arg0);
6391 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6393 case EQ_EXPR:
6394 case GE_EXPR:
6395 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6396 real_maxval (&max, neg, mode);
6397 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6398 arg0, build_real (TREE_TYPE (arg0), max));
6400 case LT_EXPR:
6401 /* x < +Inf is always equal to x <= DBL_MAX. */
6402 real_maxval (&max, neg, mode);
6403 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6404 arg0, build_real (TREE_TYPE (arg0), max));
6406 case NE_EXPR:
6407 /* x != +Inf is always equal to !(x > DBL_MAX). */
6408 real_maxval (&max, neg, mode);
6409 if (! HONOR_NANS (mode))
6410 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6411 arg0, build_real (TREE_TYPE (arg0), max));
6413 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6414 arg0, build_real (TREE_TYPE (arg0), max));
6415 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6417 default:
6418 break;
6421 return NULL_TREE;
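/* Worked example of the negative-infinity handling above: for
   "x > -Inf" the comparison is swapped to "x < +Inf" with NEG set,
   real_maxval then yields -DBL_MAX, and the LT_EXPR case returns
   "x >= -DBL_MAX".  */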
6424 /* Subroutine of fold() that optimizes comparisons of a division by
6425 a nonzero integer constant against an integer constant, i.e.
6426 X/C1 op C2.
6428 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6429 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6430 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6432 The function returns the constant folded tree if a simplification
6433 can be made, and NULL_TREE otherwise. */
6435 static tree
6436 fold_div_compare (location_t loc,
6437 enum tree_code code, tree type, tree arg0, tree arg1)
6439 tree prod, tmp, hi, lo;
6440 tree arg00 = TREE_OPERAND (arg0, 0);
6441 tree arg01 = TREE_OPERAND (arg0, 1);
6442 double_int val;
6443 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6444 bool neg_overflow;
6445 bool overflow;
6447 /* We have to do this the hard way to detect unsigned overflow.
6448 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6449 val = TREE_INT_CST (arg01)
6450 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6451 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6452 neg_overflow = false;
6454 if (unsigned_p)
6456 tmp = int_const_binop (MINUS_EXPR, arg01,
6457 build_int_cst (TREE_TYPE (arg01), 1));
6458 lo = prod;
6460 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6461 val = TREE_INT_CST (prod)
6462 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6463 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6464 -1, overflow | TREE_OVERFLOW (prod));
6466 else if (tree_int_cst_sgn (arg01) >= 0)
6468 tmp = int_const_binop (MINUS_EXPR, arg01,
6469 build_int_cst (TREE_TYPE (arg01), 1));
6470 switch (tree_int_cst_sgn (arg1))
6472 case -1:
6473 neg_overflow = true;
6474 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6475 hi = prod;
6476 break;
6478 case 0:
6479 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6480 hi = tmp;
6481 break;
6483 case 1:
6484 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6485 lo = prod;
6486 break;
6488 default:
6489 gcc_unreachable ();
6492 else
6494 /* A negative divisor reverses the relational operators. */
6495 code = swap_tree_comparison (code);
6497 tmp = int_const_binop (PLUS_EXPR, arg01,
6498 build_int_cst (TREE_TYPE (arg01), 1));
6499 switch (tree_int_cst_sgn (arg1))
6501 case -1:
6502 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6503 lo = prod;
6504 break;
6506 case 0:
6507 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6508 lo = tmp;
6509 break;
6511 case 1:
6512 neg_overflow = true;
6513 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6514 hi = prod;
6515 break;
6517 default:
6518 gcc_unreachable ();
6522 switch (code)
6524 case EQ_EXPR:
6525 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6526 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6527 if (TREE_OVERFLOW (hi))
6528 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6529 if (TREE_OVERFLOW (lo))
6530 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6531 return build_range_check (loc, type, arg00, 1, lo, hi);
6533 case NE_EXPR:
6534 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6535 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6536 if (TREE_OVERFLOW (hi))
6537 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6538 if (TREE_OVERFLOW (lo))
6539 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6540 return build_range_check (loc, type, arg00, 0, lo, hi);
6542 case LT_EXPR:
6543 if (TREE_OVERFLOW (lo))
6545 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6546 return omit_one_operand_loc (loc, type, tmp, arg00);
6548 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6550 case LE_EXPR:
6551 if (TREE_OVERFLOW (hi))
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand_loc (loc, type, tmp, arg00);
6556 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6558 case GT_EXPR:
6559 if (TREE_OVERFLOW (hi))
6561 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6562 return omit_one_operand_loc (loc, type, tmp, arg00);
6564 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6566 case GE_EXPR:
6567 if (TREE_OVERFLOW (lo))
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand_loc (loc, type, tmp, arg00);
6572 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6574 default:
6575 break;
6578 return NULL_TREE;
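/* Worked example for a positive signed divisor: for "x / 4 == 3" we
   compute prod = 12 and tmp = 3, giving lo = 12 and hi = 15, so the
   comparison folds to the range check 12 <= x && x <= 15 -- exactly
   the values that truncating division maps to 3.  */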
6582 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6583 equality/inequality test, then return a simplified form of the test
6584 using a sign test. Otherwise return NULL. TYPE is the desired
6585 result type. */
6587 static tree
6588 fold_single_bit_test_into_sign_test (location_t loc,
6589 enum tree_code code, tree arg0, tree arg1,
6590 tree result_type)
6592 /* If this is testing a single bit, we can optimize the test. */
6593 if ((code == NE_EXPR || code == EQ_EXPR)
6594 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6595 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6597 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6598 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6599 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6601 if (arg00 != NULL_TREE
6602 /* This is only a win if casting to a signed type is cheap,
6603 i.e. when arg00's type is not a partial mode. */
6604 && TYPE_PRECISION (TREE_TYPE (arg00))
6605 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6607 tree stype = signed_type_for (TREE_TYPE (arg00));
6608 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6609 result_type,
6610 fold_convert_loc (loc, stype, arg00),
6611 build_int_cst (stype, 0));
6615 return NULL_TREE;
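/* E.g. for a 32-bit signed int x, "(x & 0x80000000) != 0" tests
   exactly the sign bit, so sign_bit_p succeeds and the test folds to
   "x < 0"; the == 0 form folds to "x >= 0".  */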
6618 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6619 equality/inequality test, then return a simplified form of
6620 the test using shifts and logical operations. Otherwise return
6621 NULL. TYPE is the desired result type. */
6623 tree
6624 fold_single_bit_test (location_t loc, enum tree_code code,
6625 tree arg0, tree arg1, tree result_type)
6627 /* If this is testing a single bit, we can optimize the test. */
6628 if ((code == NE_EXPR || code == EQ_EXPR)
6629 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6630 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6632 tree inner = TREE_OPERAND (arg0, 0);
6633 tree type = TREE_TYPE (arg0);
6634 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6635 enum machine_mode operand_mode = TYPE_MODE (type);
6636 int ops_unsigned;
6637 tree signed_type, unsigned_type, intermediate_type;
6638 tree tem, one;
6640 /* First, see if we can fold the single bit test into a sign-bit
6641 test. */
6642 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6643 result_type);
6644 if (tem)
6645 return tem;
6647 /* Otherwise we have (A & C) != 0 where C is a single bit,
6648 convert that into ((A >> C2) & 1), where C2 = log2(C).
6649 Similarly for (A & C) == 0. */
6651 /* If INNER is a right shift of a constant and it plus BITNUM does
6652 not overflow, adjust BITNUM and INNER. */
6653 if (TREE_CODE (inner) == RSHIFT_EXPR
6654 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6655 && host_integerp (TREE_OPERAND (inner, 1), 1)
6656 && bitnum < TYPE_PRECISION (type)
6657 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6658 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6660 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6661 inner = TREE_OPERAND (inner, 0);
6664 /* If we are going to be able to omit the AND below, we must do our
6665 operations as unsigned. If we must use the AND, we have a choice.
6666 Normally unsigned is faster, but for some machines signed is. */
6667 #ifdef LOAD_EXTEND_OP
6668 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6669 && !flag_syntax_only) ? 0 : 1;
6670 #else
6671 ops_unsigned = 1;
6672 #endif
6674 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6675 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6676 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6677 inner = fold_convert_loc (loc, intermediate_type, inner);
6679 if (bitnum != 0)
6680 inner = build2 (RSHIFT_EXPR, intermediate_type,
6681 inner, size_int (bitnum));
6683 one = build_int_cst (intermediate_type, 1);
6685 if (code == EQ_EXPR)
6686 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6688 /* Put the AND last so it can combine with more things. */
6689 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6691 /* Make sure to return the proper type. */
6692 inner = fold_convert_loc (loc, result_type, inner);
6694 return inner;
6696 return NULL_TREE;
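/* E.g. testing bit 3 of x, "(x & 8) != 0" becomes "(x >> 3) & 1" in an
   unsigned intermediate type; for the == 0 form an XOR with 1 is
   inserted before the final AND.  */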
6699 /* Check whether we are allowed to reorder operands arg0 and arg1,
6700 such that the evaluation of arg1 occurs before arg0. */
6702 static bool
6703 reorder_operands_p (const_tree arg0, const_tree arg1)
6705 if (! flag_evaluation_order)
6706 return true;
6707 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6708 return true;
6709 return ! TREE_SIDE_EFFECTS (arg0)
6710 && ! TREE_SIDE_EFFECTS (arg1);
6713 /* Test whether it is preferable to swap two operands, ARG0 and
6714 ARG1, for example because ARG0 is an integer constant and ARG1
6715 isn't. If REORDER is true, only recommend swapping if we can
6716 evaluate the operands in reverse order. */
6718 bool
6719 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6721 STRIP_SIGN_NOPS (arg0);
6722 STRIP_SIGN_NOPS (arg1);
6724 if (TREE_CODE (arg1) == INTEGER_CST)
6725 return 0;
6726 if (TREE_CODE (arg0) == INTEGER_CST)
6727 return 1;
6729 if (TREE_CODE (arg1) == REAL_CST)
6730 return 0;
6731 if (TREE_CODE (arg0) == REAL_CST)
6732 return 1;
6734 if (TREE_CODE (arg1) == FIXED_CST)
6735 return 0;
6736 if (TREE_CODE (arg0) == FIXED_CST)
6737 return 1;
6739 if (TREE_CODE (arg1) == COMPLEX_CST)
6740 return 0;
6741 if (TREE_CODE (arg0) == COMPLEX_CST)
6742 return 1;
6744 if (TREE_CONSTANT (arg1))
6745 return 0;
6746 if (TREE_CONSTANT (arg0))
6747 return 1;
6749 if (optimize_function_for_size_p (cfun))
6750 return 0;
6752 if (reorder && flag_evaluation_order
6753 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6754 return 0;
6756 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6757 for commutative and comparison operators. Ensuring a canonical
6758 form allows the optimizers to find additional redundancies without
6759 having to explicitly check for both orderings. */
6760 if (TREE_CODE (arg0) == SSA_NAME
6761 && TREE_CODE (arg1) == SSA_NAME
6762 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6763 return 1;
6765 /* Put SSA_NAMEs last. */
6766 if (TREE_CODE (arg1) == SSA_NAME)
6767 return 0;
6768 if (TREE_CODE (arg0) == SSA_NAME)
6769 return 1;
6771 /* Put variables last. */
6772 if (DECL_P (arg1))
6773 return 0;
6774 if (DECL_P (arg0))
6775 return 1;
6777 return 0;
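/* E.g. for "5 + x" this returns 1 (ARG0 is an INTEGER_CST, ARG1 is
   not), so fold canonicalizes commutative operations to "x + 5",
   keeping constants as the second operand.  */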
6780 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6781 ARG0 is extended to a wider type. */
6783 static tree
6784 fold_widened_comparison (location_t loc, enum tree_code code,
6785 tree type, tree arg0, tree arg1)
6787 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6788 tree arg1_unw;
6789 tree shorter_type, outer_type;
6790 tree min, max;
6791 bool above, below;
6793 if (arg0_unw == arg0)
6794 return NULL_TREE;
6795 shorter_type = TREE_TYPE (arg0_unw);
6797 #ifdef HAVE_canonicalize_funcptr_for_compare
6798 /* Disable this optimization if we're casting a function pointer
6799 type on targets that require function pointer canonicalization. */
6800 if (HAVE_canonicalize_funcptr_for_compare
6801 && TREE_CODE (shorter_type) == POINTER_TYPE
6802 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6803 return NULL_TREE;
6804 #endif
6806 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6807 return NULL_TREE;
6809 arg1_unw = get_unwidened (arg1, NULL_TREE);
6811 /* If possible, express the comparison in the shorter mode. */
6812 if ((code == EQ_EXPR || code == NE_EXPR
6813 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6814 && (TREE_TYPE (arg1_unw) == shorter_type
6815 || ((TYPE_PRECISION (shorter_type)
6816 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6817 && (TYPE_UNSIGNED (shorter_type)
6818 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6819 || (TREE_CODE (arg1_unw) == INTEGER_CST
6820 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6821 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6822 && int_fits_type_p (arg1_unw, shorter_type))))
6823 return fold_build2_loc (loc, code, type, arg0_unw,
6824 fold_convert_loc (loc, shorter_type, arg1_unw));
6826 if (TREE_CODE (arg1_unw) != INTEGER_CST
6827 || TREE_CODE (shorter_type) != INTEGER_TYPE
6828 || int_fits_type_p (arg1_unw, shorter_type))
6829 return NULL_TREE;
6831 /* If we are comparing with an integer that does not fit into the range
6832 of the shorter type, the result is known. */
6833 outer_type = TREE_TYPE (arg1_unw);
6834 min = lower_bound_in_type (outer_type, shorter_type);
6835 max = upper_bound_in_type (outer_type, shorter_type);
6837 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6838 max, arg1_unw));
6839 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6840 arg1_unw, min));
6842 switch (code)
6844 case EQ_EXPR:
6845 if (above || below)
6846 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6847 break;
6849 case NE_EXPR:
6850 if (above || below)
6851 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6852 break;
6854 case LT_EXPR:
6855 case LE_EXPR:
6856 if (above)
6857 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6858 else if (below)
6859 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6861 case GT_EXPR:
6862 case GE_EXPR:
6863 if (above)
6864 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6865 else if (below)
6866 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6868 default:
6869 break;
6872 return NULL_TREE;
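/* E.g. for "unsigned char c", the test "(int) c == 300" compares
   against a constant outside [0, 255], so ABOVE is set and the
   EQ_EXPR case folds the whole comparison to false while preserving
   any side effects of the operand.  */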
6875 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6876 ARG0 just the signedness is changed. */
6878 static tree
6879 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6880 tree arg0, tree arg1)
6882 tree arg0_inner;
6883 tree inner_type, outer_type;
6885 if (!CONVERT_EXPR_P (arg0))
6886 return NULL_TREE;
6888 outer_type = TREE_TYPE (arg0);
6889 arg0_inner = TREE_OPERAND (arg0, 0);
6890 inner_type = TREE_TYPE (arg0_inner);
6892 #ifdef HAVE_canonicalize_funcptr_for_compare
6893 /* Disable this optimization if we're casting a function pointer
6894 type on targets that require function pointer canonicalization. */
6895 if (HAVE_canonicalize_funcptr_for_compare
6896 && TREE_CODE (inner_type) == POINTER_TYPE
6897 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6898 return NULL_TREE;
6899 #endif
6901 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6902 return NULL_TREE;
6904 if (TREE_CODE (arg1) != INTEGER_CST
6905 && !(CONVERT_EXPR_P (arg1)
6906 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6907 return NULL_TREE;
6909 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6910 && code != NE_EXPR
6911 && code != EQ_EXPR)
6912 return NULL_TREE;
6914 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6915 return NULL_TREE;
6917 if (TREE_CODE (arg1) == INTEGER_CST)
6918 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6919 0, TREE_OVERFLOW (arg1));
6920 else
6921 arg1 = fold_convert_loc (loc, inner_type, arg1);
6923 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
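/* E.g. for "unsigned int u", the cast in "(int) u == 5" only changes
   the signedness, so the comparison folds to "u == 5U" with the
   constant refitted into the inner type.  */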
6926 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6927 the step of the array. Reconstructs s and delta in the case of s *
6928 delta being an integer constant (and thus already folded). ADDR is
6929 the address. OP1 is the multiplicative expression. If the
6930 function succeeds, the new address expression is returned.
6931 Otherwise NULL_TREE is returned. LOC is the location of the
6932 resulting expression. */
6934 static tree
6935 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6937 tree s, delta, step;
6938 tree ref = TREE_OPERAND (addr, 0), pref;
6939 tree ret, pos;
6940 tree itype;
6941 bool mdim = false;
6943 /* Strip the nops that might be added when converting op1 to sizetype. */
6944 STRIP_NOPS (op1);
6946 /* Canonicalize op1 into a possibly non-constant delta
6947 and an INTEGER_CST s. */
6948 if (TREE_CODE (op1) == MULT_EXPR)
6950 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6952 STRIP_NOPS (arg0);
6953 STRIP_NOPS (arg1);
6955 if (TREE_CODE (arg0) == INTEGER_CST)
6957 s = arg0;
6958 delta = arg1;
6960 else if (TREE_CODE (arg1) == INTEGER_CST)
6962 s = arg1;
6963 delta = arg0;
6965 else
6966 return NULL_TREE;
6968 else if (TREE_CODE (op1) == INTEGER_CST)
6970 delta = op1;
6971 s = NULL_TREE;
6973 else
6975 /* Pretend op1 is delta * 1. */
6976 delta = op1;
6977 s = integer_one_node;
6980 /* Handle &x.array the same as we would handle &x.array[0]. */
6981 if (TREE_CODE (ref) == COMPONENT_REF
6982 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6984 tree domain;
6986 /* Remember if this was a multi-dimensional array. */
6987 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6988 mdim = true;
6990 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6991 if (! domain)
6992 goto cont;
6993 itype = TREE_TYPE (domain);
6995 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6996 if (TREE_CODE (step) != INTEGER_CST)
6997 goto cont;
6999 if (s)
7001 if (! tree_int_cst_equal (step, s))
7002 goto cont;
7004 else
7006 /* See if delta is a multiple of step. */
7007 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7008 if (! tmp)
7009 goto cont;
7010 delta = tmp;
7013 /* Only fold here if we can verify we do not overflow one
7014 dimension of a multi-dimensional array. */
7015 if (mdim)
7017 tree tmp;
7019 if (!TYPE_MIN_VALUE (domain)
7020 || !TYPE_MAX_VALUE (domain)
7021 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7022 goto cont;
7024 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7025 fold_convert_loc (loc, itype,
7026 TYPE_MIN_VALUE (domain)),
7027 fold_convert_loc (loc, itype, delta));
7028 if (TREE_CODE (tmp) != INTEGER_CST
7029 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7030 goto cont;
7033 /* We found a suitable component reference. */
7035 pref = TREE_OPERAND (addr, 0);
7036 ret = copy_node (pref);
7037 SET_EXPR_LOCATION (ret, loc);
7039 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7040 fold_build2_loc
7041 (loc, PLUS_EXPR, itype,
7042 fold_convert_loc (loc, itype,
7043 TYPE_MIN_VALUE
7044 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7045 fold_convert_loc (loc, itype, delta)),
7046 NULL_TREE, NULL_TREE);
7047 return build_fold_addr_expr_loc (loc, ret);
7050 cont:
7052 for (;; ref = TREE_OPERAND (ref, 0))
7054 if (TREE_CODE (ref) == ARRAY_REF)
7056 tree domain;
7058 /* Remember if this was a multi-dimensional array. */
7059 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7060 mdim = true;
7062 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7063 if (! domain)
7064 continue;
7065 itype = TREE_TYPE (domain);
7067 step = array_ref_element_size (ref);
7068 if (TREE_CODE (step) != INTEGER_CST)
7069 continue;
7071 if (s)
7073 if (! tree_int_cst_equal (step, s))
7074 continue;
7076 else
7078 /* See if delta is a multiple of step. */
7079 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7080 if (! tmp)
7081 continue;
7082 delta = tmp;
7085 /* Only fold here if we can verify we do not overflow one
7086 dimension of a multi-dimensional array. */
7087 if (mdim)
7089 tree tmp;
7091 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7092 || !TYPE_MAX_VALUE (domain)
7093 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7094 continue;
7096 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7097 fold_convert_loc (loc, itype,
7098 TREE_OPERAND (ref, 1)),
7099 fold_convert_loc (loc, itype, delta));
7100 if (!tmp
7101 || TREE_CODE (tmp) != INTEGER_CST
7102 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7103 continue;
7106 break;
7108 else
7109 mdim = false;
7111 if (!handled_component_p (ref))
7112 return NULL_TREE;
7115 /* We found a suitable array reference. So copy everything up to it,
7116 and replace the index. */
7118 pref = TREE_OPERAND (addr, 0);
7119 ret = copy_node (pref);
7120 SET_EXPR_LOCATION (ret, loc);
7121 pos = ret;
7123 while (pref != ref)
7125 pref = TREE_OPERAND (pref, 0);
7126 TREE_OPERAND (pos, 0) = copy_node (pref);
7127 pos = TREE_OPERAND (pos, 0);
7130 TREE_OPERAND (pos, 1)
7131 = fold_build2_loc (loc, PLUS_EXPR, itype,
7132 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7133 fold_convert_loc (loc, itype, delta));
7134 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
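/* E.g. for "int a[16]", the address "&a[i] p+ 4 * d" (4 being
   sizeof (int), i.e. the step of the array) is rebuilt as
   "&a[i + d]"; for a multi-dimensional array the fold additionally
   requires the compile-time bound check above to succeed.  */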
7138 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7139 means A >= Y && A != MAX, but in this case we know that
7140 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7142 static tree
7143 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7145 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7147 if (TREE_CODE (bound) == LT_EXPR)
7148 a = TREE_OPERAND (bound, 0);
7149 else if (TREE_CODE (bound) == GT_EXPR)
7150 a = TREE_OPERAND (bound, 1);
7151 else
7152 return NULL_TREE;
7154 typea = TREE_TYPE (a);
7155 if (!INTEGRAL_TYPE_P (typea)
7156 && !POINTER_TYPE_P (typea))
7157 return NULL_TREE;
7159 if (TREE_CODE (ineq) == LT_EXPR)
7161 a1 = TREE_OPERAND (ineq, 1);
7162 y = TREE_OPERAND (ineq, 0);
7164 else if (TREE_CODE (ineq) == GT_EXPR)
7166 a1 = TREE_OPERAND (ineq, 0);
7167 y = TREE_OPERAND (ineq, 1);
7169 else
7170 return NULL_TREE;
7172 if (TREE_TYPE (a1) != typea)
7173 return NULL_TREE;
7175 if (POINTER_TYPE_P (typea))
7177 /* Convert the pointer types into integers before taking the difference. */
7178 tree ta = fold_convert_loc (loc, ssizetype, a);
7179 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7180 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7182 else
7183 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7185 if (!diff || !integer_onep (diff))
7186 return NULL_TREE;
7188 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7191 /* Fold a sum or difference of at least one multiplication.
7192 Returns the folded tree or NULL if no simplification could be made. */
7194 static tree
7195 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7196 tree arg0, tree arg1)
7198 tree arg00, arg01, arg10, arg11;
7199 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7201 /* (A * C) +- (B * C) -> (A+-B) * C.
7202 (A * C) +- A -> A * (C+-1).
7203 We are most concerned about the case where C is a constant,
7204 but other combinations show up during loop reduction. Since
7205 it is not difficult, try all four possibilities. */
7207 if (TREE_CODE (arg0) == MULT_EXPR)
7209 arg00 = TREE_OPERAND (arg0, 0);
7210 arg01 = TREE_OPERAND (arg0, 1);
7212 else if (TREE_CODE (arg0) == INTEGER_CST)
7214 arg00 = build_one_cst (type);
7215 arg01 = arg0;
7217 else
7219 /* We cannot generate constant 1 for fract. */
7220 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7221 return NULL_TREE;
7222 arg00 = arg0;
7223 arg01 = build_one_cst (type);
7225 if (TREE_CODE (arg1) == MULT_EXPR)
7227 arg10 = TREE_OPERAND (arg1, 0);
7228 arg11 = TREE_OPERAND (arg1, 1);
7230 else if (TREE_CODE (arg1) == INTEGER_CST)
7232 arg10 = build_one_cst (type);
7233 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7234 the purpose of this canonicalization. */
7235 if (TREE_INT_CST_HIGH (arg1) == -1
7236 && negate_expr_p (arg1)
7237 && code == PLUS_EXPR)
7239 arg11 = negate_expr (arg1);
7240 code = MINUS_EXPR;
7242 else
7243 arg11 = arg1;
7245 else
7247 /* We cannot generate constant 1 for fract. */
7248 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7249 return NULL_TREE;
7250 arg10 = arg1;
7251 arg11 = build_one_cst (type);
7253 same = NULL_TREE;
7255 if (operand_equal_p (arg01, arg11, 0))
7256 same = arg01, alt0 = arg00, alt1 = arg10;
7257 else if (operand_equal_p (arg00, arg10, 0))
7258 same = arg00, alt0 = arg01, alt1 = arg11;
7259 else if (operand_equal_p (arg00, arg11, 0))
7260 same = arg00, alt0 = arg01, alt1 = arg10;
7261 else if (operand_equal_p (arg01, arg10, 0))
7262 same = arg01, alt0 = arg00, alt1 = arg11;
7264 /* No identical multiplicands; see if we can find a common
7265 power-of-two factor in non-power-of-two multiplies. This
7266 can help in multi-dimensional array access. */
7267 else if (host_integerp (arg01, 0)
7268 && host_integerp (arg11, 0))
7270 HOST_WIDE_INT int01, int11, tmp;
7271 bool swap = false;
7272 tree maybe_same;
7273 int01 = TREE_INT_CST_LOW (arg01);
7274 int11 = TREE_INT_CST_LOW (arg11);
7276 /* Move min of absolute values to int11. */
7277 if (absu_hwi (int01) < absu_hwi (int11))
7279 tmp = int01, int01 = int11, int11 = tmp;
7280 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7281 maybe_same = arg01;
7282 swap = true;
7284 else
7285 maybe_same = arg11;
7287 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7288 /* The remainder should not be a constant, otherwise we
7289 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7290 would increase the number of multiplications necessary. */
7291 && TREE_CODE (arg10) != INTEGER_CST)
7293 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7294 build_int_cst (TREE_TYPE (arg00),
7295 int01 / int11));
7296 alt1 = arg10;
7297 same = maybe_same;
7298 if (swap)
7299 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7303 if (same)
7304 return fold_build2_loc (loc, MULT_EXPR, type,
7305 fold_build2_loc (loc, code, type,
7306 fold_convert_loc (loc, type, alt0),
7307 fold_convert_loc (loc, type, alt1)),
7308 fold_convert_loc (loc, type, same));
7310 return NULL_TREE;
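/* Illustrative examples (added for exposition; not part of the original
   sources), for int operands:

     a * c + b * c   -->   (a + b) * c      // identical multiplicand
     a * c - a       -->   (c - 1) * a      // plain A treated as A * 1
     i * 4 + j * 8   -->   (i + j * 2) * 4  // common power-of-two factor

   The last rewrite is suppressed when the other multiplicand is a
   constant, so i * 4 + 2 is not turned into (i * 2 + 1) * 2.  */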
7313 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7318 static int
7319 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7321 tree type = TREE_TYPE (expr);
7322 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7323 int byte, offset, word, words;
7324 unsigned char value;
7326 if (total_bytes > len)
7327 return 0;
7328 words = total_bytes / UNITS_PER_WORD;
7330 for (byte = 0; byte < total_bytes; byte++)
7332 int bitpos = byte * BITS_PER_UNIT;
7333 if (bitpos < HOST_BITS_PER_WIDE_INT)
7334 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7335 else
7336 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7337 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7339 if (total_bytes > UNITS_PER_WORD)
7341 word = byte / UNITS_PER_WORD;
7342 if (WORDS_BIG_ENDIAN)
7343 word = (words - 1) - word;
7344 offset = word * UNITS_PER_WORD;
7345 if (BYTES_BIG_ENDIAN)
7346 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7347 else
7348 offset += byte % UNITS_PER_WORD;
7350 else
7351 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7352 ptr[offset] = value;
7354 return total_bytes;
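/* Illustrative example (added for exposition; not part of the original
   sources).  On a 32-bit little-endian target with 4-byte words,
   encoding the INTEGER_CST 0x11223344 produces

     ptr[0] == 0x44, ptr[1] == 0x33, ptr[2] == 0x22, ptr[3] == 0x11

   i.e. byte I of the buffer receives bits [I*8, I*8+8) of the value;
   a big-endian target stores the bytes in the opposite order.  */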
7358 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7359 specified by EXPR into the buffer PTR of length LEN bytes.
7360 Return the number of bytes placed in the buffer, or zero
7361 upon failure. */
7363 static int
7364 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7366 tree type = TREE_TYPE (expr);
7367 enum machine_mode mode = TYPE_MODE (type);
7368 int total_bytes = GET_MODE_SIZE (mode);
7369 FIXED_VALUE_TYPE value;
7370 tree i_value, i_type;
7372 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7373 return 0;
7375 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7377 if (NULL_TREE == i_type
7378 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7379 return 0;
7381 value = TREE_FIXED_CST (expr);
7382 i_value = double_int_to_tree (i_type, value.data);
7384 return native_encode_int (i_value, ptr, len);
7388 /* Subroutine of native_encode_expr. Encode the REAL_CST
7389 specified by EXPR into the buffer PTR of length LEN bytes.
7390 Return the number of bytes placed in the buffer, or zero
7391 upon failure. */
7393 static int
7394 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7396 tree type = TREE_TYPE (expr);
7397 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7398 int byte, offset, word, words, bitpos;
7399 unsigned char value;
7401 /* There are always 32 bits in each long, no matter the size of
7402 the host's long. We handle floating point representations with
7403 up to 192 bits. */
7404 long tmp[6];
7406 if (total_bytes > len)
7407 return 0;
7408 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7410 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7412 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7413 bitpos += BITS_PER_UNIT)
7415 byte = (bitpos / BITS_PER_UNIT) & 3;
7416 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7418 if (UNITS_PER_WORD < 4)
7420 word = byte / UNITS_PER_WORD;
7421 if (WORDS_BIG_ENDIAN)
7422 word = (words - 1) - word;
7423 offset = word * UNITS_PER_WORD;
7424 if (BYTES_BIG_ENDIAN)
7425 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7426 else
7427 offset += byte % UNITS_PER_WORD;
7429 else
7430 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7431 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7433 return total_bytes;
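/* Illustrative example (added for exposition; not part of the original
   sources).  real_to_target emits the image in 32-bit groups, so for
   the IEEE double 1.0 (bit pattern 0x3FF0000000000000) on a
   little-endian target:

     tmp[0] == 0x00000000, tmp[1] == 0x3FF00000
     ptr[]  == { 00 00 00 00 00 00 F0 3F }

   The (bitpos / BITS_PER_UNIT) & ~3 term keeps each byte within its
   32-bit group; the endianness logic only orders bytes inside the
   group.  */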
7436 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7437 specified by EXPR into the buffer PTR of length LEN bytes.
7438 Return the number of bytes placed in the buffer, or zero
7439 upon failure. */
7441 static int
7442 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7444 int rsize, isize;
7445 tree part;
7447 part = TREE_REALPART (expr);
7448 rsize = native_encode_expr (part, ptr, len);
7449 if (rsize == 0)
7450 return 0;
7451 part = TREE_IMAGPART (expr);
7452 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7453 if (isize != rsize)
7454 return 0;
7455 return rsize + isize;
7459 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7460 specified by EXPR into the buffer PTR of length LEN bytes.
7461 Return the number of bytes placed in the buffer, or zero
7462 upon failure. */
7464 static int
7465 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7467 unsigned i, count;
7468 int size, offset;
7469 tree itype, elem;
7471 offset = 0;
7472 count = VECTOR_CST_NELTS (expr);
7473 itype = TREE_TYPE (TREE_TYPE (expr));
7474 size = GET_MODE_SIZE (TYPE_MODE (itype));
7475 for (i = 0; i < count; i++)
7477 elem = VECTOR_CST_ELT (expr, i);
7478 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7479 return 0;
7480 offset += size;
7482 return offset;
7486 /* Subroutine of native_encode_expr. Encode the STRING_CST
7487 specified by EXPR into the buffer PTR of length LEN bytes.
7488 Return the number of bytes placed in the buffer, or zero
7489 upon failure. */
7491 static int
7492 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7494 tree type = TREE_TYPE (expr);
7495 HOST_WIDE_INT total_bytes;
7497 if (TREE_CODE (type) != ARRAY_TYPE
7498 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7499 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7500 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7501 return 0;
7502 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7503 if (total_bytes > len)
7504 return 0;
7505 if (TREE_STRING_LENGTH (expr) < total_bytes)
7507 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7508 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7509 total_bytes - TREE_STRING_LENGTH (expr));
7511 else
7512 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7513 return total_bytes;
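/* Illustrative example (added for exposition; not part of the original
   sources).  For a STRING_CST "abc" (4 bytes including the NUL) whose
   type is char[8], the encoder copies the 4 stored bytes and
   zero-fills the tail:

     ptr[] == { 'a', 'b', 'c', 0, 0, 0, 0, 0 }  */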
7517 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7518 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7519 specified by EXPR into the buffer PTR of length LEN bytes. Return the number of bytes
7520 placed in the buffer, or zero upon failure. */
7522 int
7523 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7525 switch (TREE_CODE (expr))
7527 case INTEGER_CST:
7528 return native_encode_int (expr, ptr, len);
7530 case REAL_CST:
7531 return native_encode_real (expr, ptr, len);
7533 case FIXED_CST:
7534 return native_encode_fixed (expr, ptr, len);
7536 case COMPLEX_CST:
7537 return native_encode_complex (expr, ptr, len);
7539 case VECTOR_CST:
7540 return native_encode_vector (expr, ptr, len);
7542 case STRING_CST:
7543 return native_encode_string (expr, ptr, len);
7545 default:
7546 return 0;
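/* Illustrative usage (added for exposition; not part of the original
   sources; CST and OTHER_TYPE are placeholder trees).  A round trip
   through the native encoding reinterprets a constant's bytes in
   another type:

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     tree r = len ? native_interpret_expr (other_type, buf, len)
                  : NULL_TREE;

   This is exactly the pattern fold_view_convert_expr uses below.  */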
7551 /* Subroutine of native_interpret_expr. Interpret the contents of
7552 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7553 If the buffer cannot be interpreted, return NULL_TREE. */
7555 static tree
7556 native_interpret_int (tree type, const unsigned char *ptr, int len)
7558 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7559 double_int result;
7561 if (total_bytes > len
7562 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7563 return NULL_TREE;
7565 result = double_int::from_buffer (ptr, total_bytes);
7567 return double_int_to_tree (type, result);
7571 /* Subroutine of native_interpret_expr. Interpret the contents of
7572 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7573 If the buffer cannot be interpreted, return NULL_TREE. */
7575 static tree
7576 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7578 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7579 double_int result;
7580 FIXED_VALUE_TYPE fixed_value;
7582 if (total_bytes > len
7583 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7584 return NULL_TREE;
7586 result = double_int::from_buffer (ptr, total_bytes);
7587 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7589 return build_fixed (type, fixed_value);
7593 /* Subroutine of native_interpret_expr. Interpret the contents of
7594 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7595 If the buffer cannot be interpreted, return NULL_TREE. */
7597 static tree
7598 native_interpret_real (tree type, const unsigned char *ptr, int len)
7600 enum machine_mode mode = TYPE_MODE (type);
7601 int total_bytes = GET_MODE_SIZE (mode);
7602 int byte, offset, word, words, bitpos;
7603 unsigned char value;
7604 /* There are always 32 bits in each long, no matter the size of
7605 the host's long. We handle floating point representations with
7606 up to 192 bits. */
7607 REAL_VALUE_TYPE r;
7608 long tmp[6];
7611 if (total_bytes > len || total_bytes > 24)
7612 return NULL_TREE;
7613 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7615 memset (tmp, 0, sizeof (tmp));
7616 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7617 bitpos += BITS_PER_UNIT)
7619 byte = (bitpos / BITS_PER_UNIT) & 3;
7620 if (UNITS_PER_WORD < 4)
7622 word = byte / UNITS_PER_WORD;
7623 if (WORDS_BIG_ENDIAN)
7624 word = (words - 1) - word;
7625 offset = word * UNITS_PER_WORD;
7626 if (BYTES_BIG_ENDIAN)
7627 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7628 else
7629 offset += byte % UNITS_PER_WORD;
7631 else
7632 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7633 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7635 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7638 real_from_target (&r, tmp, mode);
7639 return build_real (type, r);
7643 /* Subroutine of native_interpret_expr. Interpret the contents of
7644 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7645 If the buffer cannot be interpreted, return NULL_TREE. */
7647 static tree
7648 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7650 tree etype, rpart, ipart;
7651 int size;
7653 etype = TREE_TYPE (type);
7654 size = GET_MODE_SIZE (TYPE_MODE (etype));
7655 if (size * 2 > len)
7656 return NULL_TREE;
7657 rpart = native_interpret_expr (etype, ptr, size);
7658 if (!rpart)
7659 return NULL_TREE;
7660 ipart = native_interpret_expr (etype, ptr+size, size);
7661 if (!ipart)
7662 return NULL_TREE;
7663 return build_complex (type, rpart, ipart);
7667 /* Subroutine of native_interpret_expr. Interpret the contents of
7668 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7669 If the buffer cannot be interpreted, return NULL_TREE. */
7671 static tree
7672 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7674 tree etype, elem;
7675 int i, size, count;
7676 tree *elements;
7678 etype = TREE_TYPE (type);
7679 size = GET_MODE_SIZE (TYPE_MODE (etype));
7680 count = TYPE_VECTOR_SUBPARTS (type);
7681 if (size * count > len)
7682 return NULL_TREE;
7684 elements = XALLOCAVEC (tree, count);
7685 for (i = count - 1; i >= 0; i--)
7687 elem = native_interpret_expr (etype, ptr+(i*size), size);
7688 if (!elem)
7689 return NULL_TREE;
7690 elements[i] = elem;
7692 return build_vector (type, elements);
7696 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7697 the buffer PTR of length LEN as a constant of type TYPE. For
7698 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7699 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7700 return NULL_TREE. */
7702 tree
7703 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7705 switch (TREE_CODE (type))
7707 case INTEGER_TYPE:
7708 case ENUMERAL_TYPE:
7709 case BOOLEAN_TYPE:
7710 case POINTER_TYPE:
7711 case REFERENCE_TYPE:
7712 return native_interpret_int (type, ptr, len);
7714 case REAL_TYPE:
7715 return native_interpret_real (type, ptr, len);
7717 case FIXED_POINT_TYPE:
7718 return native_interpret_fixed (type, ptr, len);
7720 case COMPLEX_TYPE:
7721 return native_interpret_complex (type, ptr, len);
7723 case VECTOR_TYPE:
7724 return native_interpret_vector (type, ptr, len);
7726 default:
7727 return NULL_TREE;
7731 /* Returns true if we can interpret the contents of a native encoding
7732 as TYPE. */
7734 static bool
7735 can_native_interpret_type_p (tree type)
7737 switch (TREE_CODE (type))
7739 case INTEGER_TYPE:
7740 case ENUMERAL_TYPE:
7741 case BOOLEAN_TYPE:
7742 case POINTER_TYPE:
7743 case REFERENCE_TYPE:
7744 case FIXED_POINT_TYPE:
7745 case REAL_TYPE:
7746 case COMPLEX_TYPE:
7747 case VECTOR_TYPE:
7748 return true;
7749 default:
7750 return false;
7754 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7755 TYPE at compile-time. If we're unable to perform the conversion
7756 return NULL_TREE. */
7758 static tree
7759 fold_view_convert_expr (tree type, tree expr)
7761 /* We support up to 512-bit values (for V8DFmode). */
7762 unsigned char buffer[64];
7763 int len;
7765 /* Check that the host and target are sane. */
7766 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7767 return NULL_TREE;
7769 len = native_encode_expr (expr, buffer, sizeof (buffer));
7770 if (len == 0)
7771 return NULL_TREE;
7773 return native_interpret_expr (type, buffer, len);
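/* Illustrative example (added for exposition; not part of the original
   sources).  Where int and float are both 32 bits,

     VIEW_CONVERT_EXPR<int>(1.0f)

   folds to the INTEGER_CST 0x3F800000, the bit pattern of the IEEE
   single-precision value 1.0f reinterpreted as an integer.  */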
7776 /* Build an expression for the address of T. Folds away INDIRECT_REF
7777 to avoid confusing the gimplify process. */
7779 tree
7780 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7782 /* The size of the object is not relevant when talking about its address. */
7783 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7784 t = TREE_OPERAND (t, 0);
7786 if (TREE_CODE (t) == INDIRECT_REF)
7788 t = TREE_OPERAND (t, 0);
7790 if (TREE_TYPE (t) != ptrtype)
7791 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7793 else if (TREE_CODE (t) == MEM_REF
7794 && integer_zerop (TREE_OPERAND (t, 1)))
7795 return TREE_OPERAND (t, 0);
7796 else if (TREE_CODE (t) == MEM_REF
7797 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7798 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7799 TREE_OPERAND (t, 0),
7800 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7801 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7803 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7805 if (TREE_TYPE (t) != ptrtype)
7806 t = fold_convert_loc (loc, ptrtype, t);
7808 else
7809 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7811 return t;
7814 /* Build an expression for the address of T. */
7816 tree
7817 build_fold_addr_expr_loc (location_t loc, tree t)
7819 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7821 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
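/* Illustrative examples (added for exposition; not part of the original
   sources), for a pointer p:

     build_fold_addr_expr (*p)              -->  p
     build_fold_addr_expr (MEM_REF (p, 0))  -->  p

   i.e. taking the address of a dereference folds back to the original
   pointer instead of materializing an ADDR_EXPR.  */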
7824 static bool vec_cst_ctor_to_array (tree, tree *);
7826 /* Fold a unary expression of code CODE and type TYPE with operand
7827 OP0. Return the folded expression if folding is successful.
7828 Otherwise, return NULL_TREE. */
7830 tree
7831 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7833 tree tem;
7834 tree arg0;
7835 enum tree_code_class kind = TREE_CODE_CLASS (code);
7837 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7838 && TREE_CODE_LENGTH (code) == 1);
7840 arg0 = op0;
7841 if (arg0)
7843 if (CONVERT_EXPR_CODE_P (code)
7844 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7846 /* Don't use STRIP_NOPS, because signedness of argument type
7847 matters. */
7848 STRIP_SIGN_NOPS (arg0);
7850 else
7852 /* Strip any conversions that don't change the mode. This
7853 is safe for every expression, except for a comparison
7854 expression because its signedness is derived from its
7855 operands.
7857 Note that this is done as an internal manipulation within
7858 the constant folder, in order to find the simplest
7859 representation of the arguments so that their form can be
7860 studied. In any case, the appropriate type conversions
7861 should be put back in the tree that will get out of the
7862 constant folder. */
7863 STRIP_NOPS (arg0);
7867 if (TREE_CODE_CLASS (code) == tcc_unary)
7869 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7870 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7871 fold_build1_loc (loc, code, type,
7872 fold_convert_loc (loc, TREE_TYPE (op0),
7873 TREE_OPERAND (arg0, 1))));
7874 else if (TREE_CODE (arg0) == COND_EXPR)
7876 tree arg01 = TREE_OPERAND (arg0, 1);
7877 tree arg02 = TREE_OPERAND (arg0, 2);
7878 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7879 arg01 = fold_build1_loc (loc, code, type,
7880 fold_convert_loc (loc,
7881 TREE_TYPE (op0), arg01));
7882 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7883 arg02 = fold_build1_loc (loc, code, type,
7884 fold_convert_loc (loc,
7885 TREE_TYPE (op0), arg02));
7886 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7887 arg01, arg02);
7889 /* If this was a conversion, and all we did was to move into
7890 inside the COND_EXPR, bring it back out. But leave it if
7891 it is a conversion from integer to integer and the
7892 result precision is no wider than a word since such a
7893 conversion is cheap and may be optimized away by combine,
7894 while it couldn't if it were outside the COND_EXPR. Then return
7895 so we don't get into an infinite recursion loop taking the
7896 conversion out and then back in. */
7898 if ((CONVERT_EXPR_CODE_P (code)
7899 || code == NON_LVALUE_EXPR)
7900 && TREE_CODE (tem) == COND_EXPR
7901 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7902 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7903 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7904 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7905 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7906 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7907 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7908 && (INTEGRAL_TYPE_P
7909 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7910 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7911 || flag_syntax_only))
7912 tem = build1_loc (loc, code, type,
7913 build3 (COND_EXPR,
7914 TREE_TYPE (TREE_OPERAND
7915 (TREE_OPERAND (tem, 1), 0)),
7916 TREE_OPERAND (tem, 0),
7917 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7918 TREE_OPERAND (TREE_OPERAND (tem, 2),
7919 0)));
7920 return tem;
7924 switch (code)
7926 case PAREN_EXPR:
7927 /* Re-association barriers around constants and other re-association
7928 barriers can be removed. */
7929 if (CONSTANT_CLASS_P (op0)
7930 || TREE_CODE (op0) == PAREN_EXPR)
7931 return fold_convert_loc (loc, type, op0);
7932 return NULL_TREE;
7934 CASE_CONVERT:
7935 case FLOAT_EXPR:
7936 case FIX_TRUNC_EXPR:
7937 if (TREE_TYPE (op0) == type)
7938 return op0;
7940 if (COMPARISON_CLASS_P (op0))
7942 /* If we have (type) (a CMP b) and type is an integral type, return
7943 new expression involving the new type. Canonicalize
7944 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7945 non-integral type.
7946 Do not fold the result as that would not simplify further, also
7947 folding again results in recursions. */
7948 if (TREE_CODE (type) == BOOLEAN_TYPE)
7949 return build2_loc (loc, TREE_CODE (op0), type,
7950 TREE_OPERAND (op0, 0),
7951 TREE_OPERAND (op0, 1));
7952 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7953 && TREE_CODE (type) != VECTOR_TYPE)
7954 return build3_loc (loc, COND_EXPR, type, op0,
7955 constant_boolean_node (true, type),
7956 constant_boolean_node (false, type));
7959 /* Handle cases of two conversions in a row. */
7960 if (CONVERT_EXPR_P (op0))
7962 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7963 tree inter_type = TREE_TYPE (op0);
7964 int inside_int = INTEGRAL_TYPE_P (inside_type);
7965 int inside_ptr = POINTER_TYPE_P (inside_type);
7966 int inside_float = FLOAT_TYPE_P (inside_type);
7967 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7968 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7969 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7970 int inter_int = INTEGRAL_TYPE_P (inter_type);
7971 int inter_ptr = POINTER_TYPE_P (inter_type);
7972 int inter_float = FLOAT_TYPE_P (inter_type);
7973 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7974 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7975 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7976 int final_int = INTEGRAL_TYPE_P (type);
7977 int final_ptr = POINTER_TYPE_P (type);
7978 int final_float = FLOAT_TYPE_P (type);
7979 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7980 unsigned int final_prec = TYPE_PRECISION (type);
7981 int final_unsignedp = TYPE_UNSIGNED (type);
7983 /* Check for cases specific to UPC, involving pointer types. */
7984 if (final_ptr || inter_ptr || inside_ptr)
7986 int final_pts = final_ptr
7987 && upc_shared_type_p (TREE_TYPE (type));
7988 int inter_pts = inter_ptr
7989 && upc_shared_type_p (TREE_TYPE (inter_type));
7990 int inside_pts = inside_ptr
7991 && upc_shared_type_p (TREE_TYPE (inside_type));
7992 if (final_pts || inter_pts || inside_pts)
7994 if (!(((final_pts && inter_pts)
7995 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7996 || ((inter_pts && inside_pts)
7997 && (TREE_TYPE (inter_type)
7998 == TREE_TYPE (inside_type)))))
7999 return NULL_TREE;
8003 /* In addition to the cases of two conversions in a row
8004 handled below, if we are converting something to its own
8005 type via an object of identical or wider precision, neither
8006 conversion is needed. */
8007 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8008 && (((inter_int || inter_ptr) && final_int)
8009 || (inter_float && final_float))
8010 && inter_prec >= final_prec)
8011 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8013 /* Likewise, if the intermediate and initial types are either both
8014 float or both integer, we don't need the middle conversion if the
8015 former is wider than the latter and doesn't change the signedness
8016 (for integers). Avoid this if the final type is a pointer since
8017 then we sometimes need the middle conversion. Likewise if the
8018 final type has a precision not equal to the size of its mode. */
8019 if (((inter_int && inside_int)
8020 || (inter_float && inside_float)
8021 || (inter_vec && inside_vec))
8022 && inter_prec >= inside_prec
8023 && (inter_float || inter_vec
8024 || inter_unsignedp == inside_unsignedp)
8025 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8026 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8027 && ! final_ptr
8028 && (! final_vec || inter_prec == inside_prec))
8029 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8031 /* If we have a sign-extension of a zero-extended value, we can
8032 replace that by a single zero-extension. Likewise if the
8033 final conversion does not change precision we can drop the
8034 intermediate conversion. */
8035 if (inside_int && inter_int && final_int
8036 && ((inside_prec < inter_prec && inter_prec < final_prec
8037 && inside_unsignedp && !inter_unsignedp)
8038 || final_prec == inter_prec))
8039 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8041 /* Two conversions in a row are not needed unless:
8042 - some conversion is floating-point (overstrict for now), or
8043 - some conversion is a vector (overstrict for now), or
8044 - the intermediate type is narrower than both initial and
8045 final, or
8046 - the intermediate type and innermost type differ in signedness,
8047 and the outermost type is wider than the intermediate, or
8048 - the initial type is a pointer type and the precisions of the
8049 intermediate and final types differ, or
8050 - the final type is a pointer type and the precisions of the
8051 initial and intermediate types differ. */
8052 if (! inside_float && ! inter_float && ! final_float
8053 && ! inside_vec && ! inter_vec && ! final_vec
8054 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8055 && ! (inside_int && inter_int
8056 && inter_unsignedp != inside_unsignedp
8057 && inter_prec < final_prec)
8058 && ((inter_unsignedp && inter_prec > inside_prec)
8059 == (final_unsignedp && final_prec > inter_prec))
8060 && ! (inside_ptr && inter_prec != final_prec)
8061 && ! (final_ptr && inside_prec != inter_prec)
8062 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8063 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8064 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
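/* Illustrative examples of the rules above (added for exposition; not
   part of the original sources), assuming the usual 8/16/32/64-bit
   type sizes, with i of type int and u8 of type unsigned char:

     (int) (long) i     -->  (int) i    // own type via wider intermediate
     (long) (short) u8  -->  (long) u8  // zero- then sign-extension is
                                        // just a zero-extension  */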
8067 /* Handle (T *)&A.B.C for A being of type T and B and C
8068 living at offset zero. This occurs frequently when
8069 upcasting in C++ and then accessing the base object. */
8070 if (TREE_CODE (op0) == ADDR_EXPR
8071 && POINTER_TYPE_P (type)
8072 && handled_component_p (TREE_OPERAND (op0, 0)))
8074 HOST_WIDE_INT bitsize, bitpos;
8075 tree offset;
8076 enum machine_mode mode;
8077 int unsignedp, volatilep;
8078 tree base = TREE_OPERAND (op0, 0);
8079 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8080 &mode, &unsignedp, &volatilep, false);
8081 /* If the reference was to a (constant) zero offset, we can use
8082 the address of the base if it has the same base type
8083 as the result type and the pointer type is unqualified. */
8084 if (! offset && bitpos == 0
8085 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8086 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8087 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8088 return fold_convert_loc (loc, type,
8089 build_fold_addr_expr_loc (loc, base));
8092 if (TREE_CODE (op0) == MODIFY_EXPR
8093 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8094 /* Detect assigning a bitfield. */
8095 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8096 && DECL_BIT_FIELD
8097 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8099 /* Don't leave an assignment inside a conversion
8100 unless assigning a bitfield. */
8101 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8102 /* First do the assignment, then return converted constant. */
8103 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8104 TREE_NO_WARNING (tem) = 1;
8105 TREE_USED (tem) = 1;
8106 return tem;
8109 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8110 constants (if x has signed type, the sign bit cannot be set
8111 in c). This folds extension into the BIT_AND_EXPR.
8112 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8113 very likely don't have maximal range for their precision and this
8114 transformation effectively doesn't preserve non-maximal ranges. */
8115 if (TREE_CODE (type) == INTEGER_TYPE
8116 && TREE_CODE (op0) == BIT_AND_EXPR
8117 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8119 tree and_expr = op0;
8120 tree and0 = TREE_OPERAND (and_expr, 0);
8121 tree and1 = TREE_OPERAND (and_expr, 1);
8122 int change = 0;
8124 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8125 || (TYPE_PRECISION (type)
8126 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8127 change = 1;
8128 else if (TYPE_PRECISION (TREE_TYPE (and1))
8129 <= HOST_BITS_PER_WIDE_INT
8130 && host_integerp (and1, 1))
8132 unsigned HOST_WIDE_INT cst;
8134 cst = tree_low_cst (and1, 1);
8135 cst &= HOST_WIDE_INT_M1U
8136 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8137 change = (cst == 0);
8138 #ifdef LOAD_EXTEND_OP
8139 if (change
8140 && !flag_syntax_only
8141 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8142 == ZERO_EXTEND))
8144 tree uns = unsigned_type_for (TREE_TYPE (and0));
8145 and0 = fold_convert_loc (loc, uns, and0);
8146 and1 = fold_convert_loc (loc, uns, and1);
8148 #endif
8150 if (change)
8152 tem = force_fit_type_double (type, tree_to_double_int (and1),
8153 0, TREE_OVERFLOW (and1));
8154 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8155 fold_convert_loc (loc, type, and0), tem);
8159 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8160 when one of the new casts will fold away. Conservatively we assume
8161 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8162 if (POINTER_TYPE_P (type)
8163 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8164 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8165 && !upc_shared_type_p (TREE_TYPE (type))
8166 && !upc_shared_type_p (TREE_TYPE (
8167 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8168 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8169 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8170 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8172 tree arg00 = TREE_OPERAND (arg0, 0);
8173 tree arg01 = TREE_OPERAND (arg0, 1);
8175 return fold_build_pointer_plus_loc
8176 (loc, fold_convert_loc (loc, type, arg00), arg01);
8179 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8180 of the same precision, and X is an integer type not narrower than
8181 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8182 if (INTEGRAL_TYPE_P (type)
8183 && TREE_CODE (op0) == BIT_NOT_EXPR
8184 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8185 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8186 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8188 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8189 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8190 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8191 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8192 fold_convert_loc (loc, type, tem));
8195 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8196 type of X and Y (integer types only). */
8197 if (INTEGRAL_TYPE_P (type)
8198 && TREE_CODE (op0) == MULT_EXPR
8199 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8200 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8202 /* Be careful not to introduce new overflows. */
8203 tree mult_type;
8204 if (TYPE_OVERFLOW_WRAPS (type))
8205 mult_type = type;
8206 else
8207 mult_type = unsigned_type_for (type);
8209 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8211 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8212 fold_convert_loc (loc, mult_type,
8213 TREE_OPERAND (op0, 0)),
8214 fold_convert_loc (loc, mult_type,
8215 TREE_OPERAND (op0, 1)));
8216 return fold_convert_loc (loc, type, tem);
8220 tem = fold_convert_const (code, type, op0);
8221 return tem ? tem : NULL_TREE;
8223 case ADDR_SPACE_CONVERT_EXPR:
8224 if (integer_zerop (arg0))
8225 return fold_convert_const (code, type, arg0);
8226 return NULL_TREE;
8228 case FIXED_CONVERT_EXPR:
8229 tem = fold_convert_const (code, type, arg0);
8230 return tem ? tem : NULL_TREE;
8232 case VIEW_CONVERT_EXPR:
8233 if (TREE_TYPE (op0) == type)
8234 return op0;
8235 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8236 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8237 type, TREE_OPERAND (op0, 0));
8238 if (TREE_CODE (op0) == MEM_REF)
8239 return fold_build2_loc (loc, MEM_REF, type,
8240 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8242 /* For integral conversions with the same precision or pointer
8243 conversions use a NOP_EXPR instead. */
8244 if ((INTEGRAL_TYPE_P (type)
8245 || (POINTER_TYPE_P (type)
8246 && !upc_shared_type_p (TREE_TYPE (type))))
8247 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8248 || (POINTER_TYPE_P (TREE_TYPE (op0))
8249 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8250 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8251 return fold_convert_loc (loc, type, op0);
8253 /* Strip inner integral conversions that do not change the precision. */
8254 if (CONVERT_EXPR_P (op0)
8255 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8256 || (POINTER_TYPE_P (TREE_TYPE (op0))
8257 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8258 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8259 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8260 && !upc_shared_type_p (TREE_TYPE (
8261 TREE_TYPE (
8262 TREE_OPERAND (op0, 0))))))
8263 && (TYPE_PRECISION (TREE_TYPE (op0))
8264 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8265 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8266 type, TREE_OPERAND (op0, 0));
8268 return fold_view_convert_expr (type, op0);
8270 case NEGATE_EXPR:
8271 tem = fold_negate_expr (loc, arg0);
8272 if (tem)
8273 return fold_convert_loc (loc, type, tem);
8274 return NULL_TREE;
8276 case ABS_EXPR:
8277 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8278 return fold_abs_const (arg0, type);
8279 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8280 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8281 /* Convert fabs((double)float) into (double)fabsf(float). */
8282 else if (TREE_CODE (arg0) == NOP_EXPR
8283 && TREE_CODE (type) == REAL_TYPE)
8285 tree targ0 = strip_float_extensions (arg0);
8286 if (targ0 != arg0)
8287 return fold_convert_loc (loc, type,
8288 fold_build1_loc (loc, ABS_EXPR,
8289 TREE_TYPE (targ0),
8290 targ0));
8292 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8293 else if (TREE_CODE (arg0) == ABS_EXPR)
8294 return arg0;
8295 else if (tree_expr_nonnegative_p (arg0))
8296 return arg0;
8298 /* Strip sign ops from argument. */
8299 if (TREE_CODE (type) == REAL_TYPE)
8301 tem = fold_strip_sign_ops (arg0);
8302 if (tem)
8303 return fold_build1_loc (loc, ABS_EXPR, type,
8304 fold_convert_loc (loc, type, tem));
8306 return NULL_TREE;
8308 case CONJ_EXPR:
8309 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8310 return fold_convert_loc (loc, type, arg0);
8311 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8313 tree itype = TREE_TYPE (type);
8314 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8315 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8316 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8317 negate_expr (ipart));
8319 if (TREE_CODE (arg0) == COMPLEX_CST)
8321 tree itype = TREE_TYPE (type);
8322 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8323 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8324 return build_complex (type, rpart, negate_expr (ipart));
8326 if (TREE_CODE (arg0) == CONJ_EXPR)
8327 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8328 return NULL_TREE;
8330 case BIT_NOT_EXPR:
8331 if (TREE_CODE (arg0) == INTEGER_CST)
8332 return fold_not_const (arg0, type);
8333 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8334 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8335 /* Convert ~ (-A) to A - 1. */
8336 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8337 return fold_build2_loc (loc, MINUS_EXPR, type,
8338 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8339 build_int_cst (type, 1));
8340 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8341 else if (INTEGRAL_TYPE_P (type)
8342 && ((TREE_CODE (arg0) == MINUS_EXPR
8343 && integer_onep (TREE_OPERAND (arg0, 1)))
8344 || (TREE_CODE (arg0) == PLUS_EXPR
8345 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8346 return fold_build1_loc (loc, NEGATE_EXPR, type,
8347 fold_convert_loc (loc, type,
8348 TREE_OPERAND (arg0, 0)));
8349 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8350 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8351 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8352 fold_convert_loc (loc, type,
8353 TREE_OPERAND (arg0, 0)))))
8354 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8355 fold_convert_loc (loc, type,
8356 TREE_OPERAND (arg0, 1)));
8357 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8358 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8359 fold_convert_loc (loc, type,
8360 TREE_OPERAND (arg0, 1)))))
8361 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8362 fold_convert_loc (loc, type,
8363 TREE_OPERAND (arg0, 0)), tem);
8364 /* Perform BIT_NOT_EXPR on each element individually. */
8365 else if (TREE_CODE (arg0) == VECTOR_CST)
8367 tree *elements;
8368 tree elem;
8369 unsigned count = VECTOR_CST_NELTS (arg0), i;
8371 elements = XALLOCAVEC (tree, count);
8372 for (i = 0; i < count; i++)
8374 elem = VECTOR_CST_ELT (arg0, i);
8375 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8376 if (elem == NULL_TREE)
8377 break;
8378 elements[i] = elem;
8380 if (i == count)
8381 return build_vector (type, elements);
8383 else if (COMPARISON_CLASS_P (arg0)
8384 && (VECTOR_TYPE_P (type)
8385 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8387 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8388 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8389 HONOR_NANS (TYPE_MODE (op_type)));
8390 if (subcode != ERROR_MARK)
8391 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8392 TREE_OPERAND (arg0, 1));
8396 return NULL_TREE;
8398 case TRUTH_NOT_EXPR:
8399 /* Note that the operand of this must be an int
8400 and its values must be 0 or 1.
8401 ("true" is a fixed value perhaps depending on the language,
8402 but we don't handle values other than 1 correctly yet.) */
8403 tem = fold_truth_not_expr (loc, arg0);
8404 if (!tem)
8405 return NULL_TREE;
8406 return fold_convert_loc (loc, type, tem);
8408 case REALPART_EXPR:
8409 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8410 return fold_convert_loc (loc, type, arg0);
8411 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8412 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8413 TREE_OPERAND (arg0, 1));
8414 if (TREE_CODE (arg0) == COMPLEX_CST)
8415 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8416 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8418 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8419 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8420 fold_build1_loc (loc, REALPART_EXPR, itype,
8421 TREE_OPERAND (arg0, 0)),
8422 fold_build1_loc (loc, REALPART_EXPR, itype,
8423 TREE_OPERAND (arg0, 1)));
8424 return fold_convert_loc (loc, type, tem);
8426 if (TREE_CODE (arg0) == CONJ_EXPR)
8428 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8429 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8430 TREE_OPERAND (arg0, 0));
8431 return fold_convert_loc (loc, type, tem);
8433 if (TREE_CODE (arg0) == CALL_EXPR)
8435 tree fn = get_callee_fndecl (arg0);
8436 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8437 switch (DECL_FUNCTION_CODE (fn))
8439 CASE_FLT_FN (BUILT_IN_CEXPI):
8440 fn = mathfn_built_in (type, BUILT_IN_COS);
8441 if (fn)
8442 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8443 break;
8445 default:
8446 break;
8449 return NULL_TREE;
8451 case IMAGPART_EXPR:
8452 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8453 return build_zero_cst (type);
8454 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8455 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8456 TREE_OPERAND (arg0, 0));
8457 if (TREE_CODE (arg0) == COMPLEX_CST)
8458 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8459 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8461 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8462 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8463 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8464 TREE_OPERAND (arg0, 0)),
8465 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8466 TREE_OPERAND (arg0, 1)));
8467 return fold_convert_loc (loc, type, tem);
8469 if (TREE_CODE (arg0) == CONJ_EXPR)
8471 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8472 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8473 return fold_convert_loc (loc, type, negate_expr (tem));
8475 if (TREE_CODE (arg0) == CALL_EXPR)
8477 tree fn = get_callee_fndecl (arg0);
8478 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8479 switch (DECL_FUNCTION_CODE (fn))
8481 CASE_FLT_FN (BUILT_IN_CEXPI):
8482 fn = mathfn_built_in (type, BUILT_IN_SIN);
8483 if (fn)
8484 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8485 break;
8487 default:
8488 break;
8491 return NULL_TREE;
8493 case INDIRECT_REF:
8494 /* Fold *&X to X if X is an lvalue. */
8495 if (TREE_CODE (op0) == ADDR_EXPR)
8497 tree op00 = TREE_OPERAND (op0, 0);
8498 if ((TREE_CODE (op00) == VAR_DECL
8499 || TREE_CODE (op00) == PARM_DECL
8500 || TREE_CODE (op00) == RESULT_DECL)
8501 && !TREE_READONLY (op00))
8502 return op00;
8504 return NULL_TREE;
8506 case VEC_UNPACK_LO_EXPR:
8507 case VEC_UNPACK_HI_EXPR:
8508 case VEC_UNPACK_FLOAT_LO_EXPR:
8509 case VEC_UNPACK_FLOAT_HI_EXPR:
8511 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8512 tree *elts;
8513 enum tree_code subcode;
8515 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8516 if (TREE_CODE (arg0) != VECTOR_CST)
8517 return NULL_TREE;
8519 elts = XALLOCAVEC (tree, nelts * 2);
8520 if (!vec_cst_ctor_to_array (arg0, elts))
8521 return NULL_TREE;
8523 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8524 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8525 elts += nelts;
8527 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8528 subcode = NOP_EXPR;
8529 else
8530 subcode = FLOAT_EXPR;
8532 for (i = 0; i < nelts; i++)
8534 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8535 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8536 return NULL_TREE;
8539 return build_vector (type, elts);
8542 case REDUC_MIN_EXPR:
8543 case REDUC_MAX_EXPR:
8544 case REDUC_PLUS_EXPR:
8546 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8547 tree *elts;
8548 enum tree_code subcode;
8550 if (TREE_CODE (op0) != VECTOR_CST)
8551 return NULL_TREE;
8553 elts = XALLOCAVEC (tree, nelts);
8554 if (!vec_cst_ctor_to_array (op0, elts))
8555 return NULL_TREE;
8557 switch (code)
8559 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8560 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8561 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8562 default: gcc_unreachable ();
8565 for (i = 1; i < nelts; i++)
8567 elts[0] = const_binop (subcode, elts[0], elts[i]);
8568 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8569 return NULL_TREE;
8570 elts[i] = build_zero_cst (TREE_TYPE (type));
8573 return build_vector (type, elts);
8576 default:
8577 return NULL_TREE;
8578 } /* switch (code) */
8582 /* If the operation was a conversion do _not_ mark a resulting constant
8583 with TREE_OVERFLOW if the original constant was not. These conversions
8584 have implementation-defined behavior and retaining the TREE_OVERFLOW
8585 flag here would confuse later passes such as VRP. */
8586 tree
8587 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8588 tree type, tree op0)
8590 tree res = fold_unary_loc (loc, code, type, op0);
8591 if (res
8592 && TREE_CODE (res) == INTEGER_CST
8593 && TREE_CODE (op0) == INTEGER_CST
8594 && CONVERT_EXPR_CODE_P (code))
8595 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8597 return res;
8600 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8601 operands OP0 and OP1. LOC is the location of the resulting expression.
8602 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8603 Return the folded expression if folding is successful. Otherwise,
8604 return NULL_TREE. */
8605 static tree
8606 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8607 tree arg0, tree arg1, tree op0, tree op1)
8609 tree tem;
8611 /* We only do these simplifications if we are optimizing. */
8612 if (!optimize)
8613 return NULL_TREE;
8615 /* Check for things like (A || B) && (A || C). We can convert this
8616 to A || (B && C). Note that either operator can be any of the four
8617 truth and/or operations and the transformation will still be
8618 valid. Also note that we only care about order for the
8619 ANDIF and ORIF operators. If B contains side effects, this
8620 might change the truth-value of A. */
8621 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8622 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8623 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8624 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8625 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8626 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8628 tree a00 = TREE_OPERAND (arg0, 0);
8629 tree a01 = TREE_OPERAND (arg0, 1);
8630 tree a10 = TREE_OPERAND (arg1, 0);
8631 tree a11 = TREE_OPERAND (arg1, 1);
8632 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8633 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8634 && (code == TRUTH_AND_EXPR
8635 || code == TRUTH_OR_EXPR));
8637 if (operand_equal_p (a00, a10, 0))
8638 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8639 fold_build2_loc (loc, code, type, a01, a11));
8640 else if (commutative && operand_equal_p (a00, a11, 0))
8641 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8642 fold_build2_loc (loc, code, type, a01, a10));
8643 else if (commutative && operand_equal_p (a01, a10, 0))
8644 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8645 fold_build2_loc (loc, code, type, a00, a11));
8647 /* This case is tricky because we must either have commutative
8648 operators or else A10 must not have side-effects. */
8650 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8651 && operand_equal_p (a01, a11, 0))
8652 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8653 fold_build2_loc (loc, code, type, a00, a10),
8654 a01);
8657 /* See if we can build a range comparison. */
8658 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8659 return tem;
8661 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8662 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8664 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8665 if (tem)
8666 return fold_build2_loc (loc, code, type, tem, arg1);
8669 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8670 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8672 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8673 if (tem)
8674 return fold_build2_loc (loc, code, type, arg0, tem);
8677 /* Check for the possibility of merging component references. If our
8678 lhs is another similar operation, try to merge its rhs with our
8679 rhs. Then try to merge our lhs and rhs. */
8680 if (TREE_CODE (arg0) == code
8681 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8682 TREE_OPERAND (arg0, 1), arg1)))
8683 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8685 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8686 return tem;
8688 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8689 && (code == TRUTH_AND_EXPR
8690 || code == TRUTH_ANDIF_EXPR
8691 || code == TRUTH_OR_EXPR
8692 || code == TRUTH_ORIF_EXPR))
8694 enum tree_code ncode, icode;
8696 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8697 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8698 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8700 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8701 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8702 We don't want to pack more than two leafs to a non-IF AND/OR
8703 expression.
8704 If the tree-code of the left-hand operand isn't an AND/OR-IF code and not
8705 equal to IF-CODE, then we don't want to add the right-hand operand.
8706 If the inner right-hand side of the left-hand operand has
8707 side-effects, or isn't simple, then we can't add to it,
8708 as otherwise we might destroy the if-sequence. */
8709 if (TREE_CODE (arg0) == icode
8710 && simple_operand_p_2 (arg1)
8711 /* Needed for sequence points to handle traps and
8712 side-effects. */
8713 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8715 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8716 arg1);
8717 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8718 tem);
8720 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8721 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8722 else if (TREE_CODE (arg1) == icode
8723 && simple_operand_p_2 (arg0)
8724 /* Needed for sequence points to handle traps and
8725 side-effects. */
8726 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8728 tem = fold_build2_loc (loc, ncode, type,
8729 arg0, TREE_OPERAND (arg1, 0));
8730 return fold_build2_loc (loc, icode, type, tem,
8731 TREE_OPERAND (arg1, 1));
8733 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8734 into (A OR B).
8735 For sequence point consistency, we need to check for trapping
8736 and side-effects. */
8737 else if (code == icode && simple_operand_p_2 (arg0)
8738 && simple_operand_p_2 (arg1))
8739 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8742 return NULL_TREE;
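/* Illustrative examples (added for exposition; not part of the original
   sources), for side-effect-free operands:

     (a || b) && (a || c)   -->   a || (b && c)
     ((a && b) && c)        -->   a && (b && c)

   A truth table confirms the first: both sides are true exactly when
   a is true, or when b and c both are.  */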
8745 /* Fold a binary expression of code CODE and type TYPE with operands
8746 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8747 Return the folded expression if folding is successful. Otherwise,
8748 return NULL_TREE. */
8750 static tree
8751 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8753 enum tree_code compl_code;
8755 if (code == MIN_EXPR)
8756 compl_code = MAX_EXPR;
8757 else if (code == MAX_EXPR)
8758 compl_code = MIN_EXPR;
8759 else
8760 gcc_unreachable ();
8762 /* MIN (MAX (a, b), b) == b. */
8763 if (TREE_CODE (op0) == compl_code
8764 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8765 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8767 /* MIN (MAX (b, a), b) == b. */
8768 if (TREE_CODE (op0) == compl_code
8769 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8770 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8771 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8773 /* MIN (a, MAX (a, b)) == a. */
8774 if (TREE_CODE (op1) == compl_code
8775 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8776 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8777 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8779 /* MIN (a, MAX (b, a)) == a. */
8780 if (TREE_CODE (op1) == compl_code
8781 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8782 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8783 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8785 return NULL_TREE;
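/* Illustrative example (added for exposition; not part of the original
   sources):

     MIN (MAX (a, b), b)   -->   b

   since MAX (a, b) >= b, the outer MIN always selects b; the other
   three cases handled above are the symmetric variants.  */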
8788 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8789 by changing CODE to reduce the magnitude of constants involved in
8790 ARG0 of the comparison.
8791 Returns a canonicalized comparison tree if a simplification was
8792 possible, otherwise returns NULL_TREE.
8793 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8794 valid if signed overflow is undefined. */
8796 static tree
8797 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8798 tree arg0, tree arg1,
8799 bool *strict_overflow_p)
8801 enum tree_code code0 = TREE_CODE (arg0);
8802 tree t, cst0 = NULL_TREE;
8803 int sgn0;
8804 bool swap = false;
8806 /* Match A +- CST code arg1 and CST code arg1. We can change the
8807 first form only if overflow is undefined. */
8808 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8809 /* In principle pointers also have undefined overflow behavior,
8810 but that causes problems elsewhere. */
8811 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8812 && (code0 == MINUS_EXPR
8813 || code0 == PLUS_EXPR)
8814 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8815 || code0 == INTEGER_CST))
8816 return NULL_TREE;
8818 /* Identify the constant in arg0 and its sign. */
8819 if (code0 == INTEGER_CST)
8820 cst0 = arg0;
8821 else
8822 cst0 = TREE_OPERAND (arg0, 1);
8823 sgn0 = tree_int_cst_sgn (cst0);
8825 /* Overflowed constants and zero will cause problems. */
8826 if (integer_zerop (cst0)
8827 || TREE_OVERFLOW (cst0))
8828 return NULL_TREE;
8830 /* See if we can reduce the magnitude of the constant in
8831 arg0 by changing the comparison code. */
8832 if (code0 == INTEGER_CST)
8834 /* CST <= arg1 -> CST-1 < arg1. */
8835 if (code == LE_EXPR && sgn0 == 1)
8836 code = LT_EXPR;
8837 /* -CST < arg1 -> -CST-1 <= arg1. */
8838 else if (code == LT_EXPR && sgn0 == -1)
8839 code = LE_EXPR;
8840 /* CST > arg1 -> CST-1 >= arg1. */
8841 else if (code == GT_EXPR && sgn0 == 1)
8842 code = GE_EXPR;
8843 /* -CST >= arg1 -> -CST-1 > arg1. */
8844 else if (code == GE_EXPR && sgn0 == -1)
8845 code = GT_EXPR;
8846 else
8847 return NULL_TREE;
8848 /* arg1 code' CST' might be more canonical. */
8849 swap = true;
8851 else
8853 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8854 if (code == LT_EXPR
8855 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8856 code = LE_EXPR;
8857 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8858 else if (code == GT_EXPR
8859 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8860 code = GE_EXPR;
8861 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8862 else if (code == LE_EXPR
8863 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8864 code = LT_EXPR;
8865 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8866 else if (code == GE_EXPR
8867 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8868 code = GT_EXPR;
8869 else
8870 return NULL_TREE;
8871 *strict_overflow_p = true;
8874 /* Now build the constant reduced in magnitude. But not if that
8875 would produce one outside of its type's range. */
8876 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8877 && ((sgn0 == 1
8878 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8879 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8880 || (sgn0 == -1
8881 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8882 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8883 /* We cannot swap the comparison here as that would cause us to
8884 endlessly recurse. */
8885 return NULL_TREE;
8887 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8888 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8889 if (code0 != INTEGER_CST)
8890 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8891 t = fold_convert (TREE_TYPE (arg1), t);
8893 /* If swapping might yield a more canonical form, do so. */
8894 if (swap)
8895 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8896 else
8897 return fold_build2_loc (loc, code, type, t, arg1);
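/* Illustrative examples (added for exposition; not part of the original
   sources), valid for signed x and y when overflow is undefined:

     x + 2 <= y   -->   x + 1 < y
     3 <= y       -->   2 < y, then swapped to y > 2

   Each step reduces the magnitude of the constant by one while
   preserving the value of the comparison.  */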
8900 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8901 overflow further. Try to decrease the magnitude of constants involved
8902 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8903 and put sole constants at the second argument position.
8904 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8906 static tree
8907 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8908 tree arg0, tree arg1)
8910 tree t;
8911 bool strict_overflow_p;
8912 const char * const warnmsg = G_("assuming signed overflow does not occur "
8913 "when reducing constant in comparison");
8915 /* Try canonicalization by simplifying arg0. */
8916 strict_overflow_p = false;
8917 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8918 &strict_overflow_p);
8919 if (t)
8921 if (strict_overflow_p)
8922 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8923 return t;
8926 /* Try canonicalization by simplifying arg1 using the swapped
8927 comparison. */
8928 code = swap_tree_comparison (code);
8929 strict_overflow_p = false;
8930 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8931 &strict_overflow_p);
8932 if (t && strict_overflow_p)
8933 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8934 return t;
8937 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8938 space. This is used to avoid issuing overflow warnings for
8939 expressions like &p->x which cannot wrap. */
8941 static bool
8942 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8944 double_int di_offset, total;
8946 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8947 return true;
8949 if (bitpos < 0)
8950 return true;
8952 if (offset == NULL_TREE)
8953 di_offset = double_int_zero;
8954 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8955 return true;
8956 else
8957 di_offset = TREE_INT_CST (offset);
8959 bool overflow;
8960 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8961 total = di_offset.add_with_sign (units, true, &overflow);
8962 if (overflow)
8963 return true;
8965 if (total.high != 0)
8966 return true;
8968 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8969 if (size <= 0)
8970 return true;
8972 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8973 array. */
8974 if (TREE_CODE (base) == ADDR_EXPR)
8976 HOST_WIDE_INT base_size;
8978 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8979 if (base_size > 0 && size < base_size)
8980 size = base_size;
8983 return total.low > (unsigned HOST_WIDE_INT) size;
8986 /* Subroutine of fold_binary. This routine performs all of the
8987 transformations that are common to the equality/inequality
8988 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8989 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8990 fold_binary should call fold_binary instead of this function. Fold a comparison with
8991 tree code CODE and type TYPE with operands OP0 and OP1. Return
8992 the folded comparison or NULL_TREE. */
8994 static tree
8995 fold_comparison (location_t loc, enum tree_code code, tree type,
8996 tree op0, tree op1)
8998 tree arg0, arg1, tem;
9000 arg0 = op0;
9001 arg1 = op1;
9003 STRIP_SIGN_NOPS (arg0);
9004 STRIP_SIGN_NOPS (arg1);
9006 tem = fold_relational_const (code, type, arg0, arg1);
9007 if (tem != NULL_TREE)
9008 return tem;
9010 /* If one arg is a real or integer constant, put it last. */
9011 if (tree_swap_operands_p (arg0, arg1, true))
9012 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9014 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
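/* For example, with undefined signed overflow, X + 4 < 10 becomes
   X < 6 here, since 10 - 4 does not overflow. */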
9015 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9016 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9017 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9018 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9019 && (TREE_CODE (arg1) == INTEGER_CST
9020 && !TREE_OVERFLOW (arg1)))
9022 tree const1 = TREE_OPERAND (arg0, 1);
9023 tree const2 = arg1;
9024 tree variable = TREE_OPERAND (arg0, 0);
9025 tree lhs;
9026 int lhs_add;
9027 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9029 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9030 TREE_TYPE (arg1), const2, const1);
9032 /* If the constant operation overflowed this can be
9033 simplified as a comparison against INT_MAX/INT_MIN. */
9034 if (TREE_CODE (lhs) == INTEGER_CST
9035 && TREE_OVERFLOW (lhs))
9037 int const1_sgn = tree_int_cst_sgn (const1);
9038 enum tree_code code2 = code;
9040 /* Get the sign of the constant on the lhs if the
9041 operation were VARIABLE + CONST1. */
9042 if (TREE_CODE (arg0) == MINUS_EXPR)
9043 const1_sgn = -const1_sgn;
9045 /* The sign of the constant determines if we overflowed
9046 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9047 Canonicalize to the INT_MIN overflow by swapping the comparison
9048 if necessary. */
9049 if (const1_sgn == -1)
9050 code2 = swap_tree_comparison (code);
9052 /* We now can look at the canonicalized case
9053 VARIABLE + 1 CODE2 INT_MIN
9054 and decide on the result. */
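/* For example, X + 1 < INT_MIN folds to false: with undefined signed
   overflow, X + 1 can never wrap below INT_MIN. */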
9055 if (code2 == LT_EXPR
9056 || code2 == LE_EXPR
9057 || code2 == EQ_EXPR)
9058 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9059 else if (code2 == NE_EXPR
9060 || code2 == GE_EXPR
9061 || code2 == GT_EXPR)
9062 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9065 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9066 && (TREE_CODE (lhs) != INTEGER_CST
9067 || !TREE_OVERFLOW (lhs)))
9069 if (code != EQ_EXPR && code != NE_EXPR)
9070 fold_overflow_warning ("assuming signed overflow does not occur "
9071 "when changing X +- C1 cmp C2 to "
9072 "X cmp C1 +- C2",
9073 WARN_STRICT_OVERFLOW_COMPARISON);
9074 return fold_build2_loc (loc, code, type, variable, lhs);
9078 /* For comparisons of pointers we can decompose them into a compile-time
9079 comparison of the base objects and the offsets into the object.
9080 This requires at least one operand being an ADDR_EXPR or a
9081 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9082 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9083 && (TREE_CODE (arg0) == ADDR_EXPR
9084 || TREE_CODE (arg1) == ADDR_EXPR
9085 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9086 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9088 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9089 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9090 enum machine_mode mode;
9091 int volatilep, unsignedp;
9092 bool indirect_base0 = false, indirect_base1 = false;
9094 /* Get base and offset for the access. Strip ADDR_EXPR for
9095 get_inner_reference, but put it back by stripping INDIRECT_REF
9096 off the base object if possible. indirect_baseN will be true
9097 if baseN is not an address but refers to the object itself. */
9098 base0 = arg0;
9099 if (TREE_CODE (arg0) == ADDR_EXPR)
9101 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9102 &bitsize, &bitpos0, &offset0, &mode,
9103 &unsignedp, &volatilep, false);
9104 if (TREE_CODE (base0) == INDIRECT_REF)
9105 base0 = TREE_OPERAND (base0, 0);
9106 else
9107 indirect_base0 = true;
9109 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9111 base0 = TREE_OPERAND (arg0, 0);
9112 STRIP_SIGN_NOPS (base0);
9113 if (TREE_CODE (base0) == ADDR_EXPR)
9115 base0 = TREE_OPERAND (base0, 0);
9116 indirect_base0 = true;
9118 offset0 = TREE_OPERAND (arg0, 1);
9119 if (host_integerp (offset0, 0))
9121 HOST_WIDE_INT off = size_low_cst (offset0);
9122 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9123 * BITS_PER_UNIT)
9124 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9126 bitpos0 = off * BITS_PER_UNIT;
9127 offset0 = NULL_TREE;
9132 base1 = arg1;
9133 if (TREE_CODE (arg1) == ADDR_EXPR)
9135 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9136 &bitsize, &bitpos1, &offset1, &mode,
9137 &unsignedp, &volatilep, false);
9138 if (TREE_CODE (base1) == INDIRECT_REF)
9139 base1 = TREE_OPERAND (base1, 0);
9140 else
9141 indirect_base1 = true;
9143 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9145 base1 = TREE_OPERAND (arg1, 0);
9146 STRIP_SIGN_NOPS (base1);
9147 if (TREE_CODE (base1) == ADDR_EXPR)
9149 base1 = TREE_OPERAND (base1, 0);
9150 indirect_base1 = true;
9152 offset1 = TREE_OPERAND (arg1, 1);
9153 if (host_integerp (offset1, 0))
9155 HOST_WIDE_INT off = size_low_cst (offset1);
9156 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9157 * BITS_PER_UNIT)
9158 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9160 bitpos1 = off * BITS_PER_UNIT;
9161 offset1 = NULL_TREE;
9166 /* A local variable can never be pointed to by
9167 the default SSA name of an incoming parameter. */
9168 if ((TREE_CODE (arg0) == ADDR_EXPR
9169 && indirect_base0
9170 && TREE_CODE (base0) == VAR_DECL
9171 && auto_var_in_fn_p (base0, current_function_decl)
9172 && !indirect_base1
9173 && TREE_CODE (base1) == SSA_NAME
9174 && SSA_NAME_IS_DEFAULT_DEF (base1)
9175 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9176 || (TREE_CODE (arg1) == ADDR_EXPR
9177 && indirect_base1
9178 && TREE_CODE (base1) == VAR_DECL
9179 && auto_var_in_fn_p (base1, current_function_decl)
9180 && !indirect_base0
9181 && TREE_CODE (base0) == SSA_NAME
9182 && SSA_NAME_IS_DEFAULT_DEF (base0)
9183 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9185 if (code == NE_EXPR)
9186 return constant_boolean_node (1, type);
9187 else if (code == EQ_EXPR)
9188 return constant_boolean_node (0, type);
9190 /* If we have equivalent bases we might be able to simplify. */
9191 else if (indirect_base0 == indirect_base1
9192 && operand_equal_p (base0, base1, 0))
9194 /* We can fold this expression to a constant if the non-constant
9195 offset parts are equal. */
9196 if ((offset0 == offset1
9197 || (offset0 && offset1
9198 && operand_equal_p (offset0, offset1, 0)))
9199 && (code == EQ_EXPR
9200 || code == NE_EXPR
9201 || (indirect_base0 && DECL_P (base0))
9202 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9205 if (code != EQ_EXPR
9206 && code != NE_EXPR
9207 && bitpos0 != bitpos1
9208 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9209 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9210 fold_overflow_warning (("assuming pointer wraparound does not "
9211 "occur when comparing P +- C1 with "
9212 "P +- C2"),
9213 WARN_STRICT_OVERFLOW_CONDITIONAL);
9215 switch (code)
9217 case EQ_EXPR:
9218 return constant_boolean_node (bitpos0 == bitpos1, type);
9219 case NE_EXPR:
9220 return constant_boolean_node (bitpos0 != bitpos1, type);
9221 case LT_EXPR:
9222 return constant_boolean_node (bitpos0 < bitpos1, type);
9223 case LE_EXPR:
9224 return constant_boolean_node (bitpos0 <= bitpos1, type);
9225 case GE_EXPR:
9226 return constant_boolean_node (bitpos0 >= bitpos1, type);
9227 case GT_EXPR:
9228 return constant_boolean_node (bitpos0 > bitpos1, type);
9229 default:;
9232 /* We can simplify the comparison to a comparison of the variable
9233 offset parts if the constant offset parts are equal.
9234 Be careful to use signed sizetype here because otherwise we
9235 mess with array offsets in the wrong way. This is possible
9236 because pointer arithmetic is restricted to remain within an
9237 object and overflow on pointer differences is undefined as of
9238 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
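/* For example, comparing &a[i] and &a[j] with equal constant parts
   reduces to comparing the variable offsets derived from i and j,
   converted to ssizetype. */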
9239 else if (bitpos0 == bitpos1
9240 && ((code == EQ_EXPR || code == NE_EXPR)
9241 || (indirect_base0 && DECL_P (base0))
9242 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9244 /* By converting to signed sizetype we cover middle-end pointer
9245 arithmetic which operates on unsigned pointer types of size
9246 type size and ARRAY_REF offsets which are properly sign or
9247 zero extended from their type in case it is narrower than
9248 sizetype. */
9249 if (offset0 == NULL_TREE)
9250 offset0 = build_int_cst (ssizetype, 0);
9251 else
9252 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9253 if (offset1 == NULL_TREE)
9254 offset1 = build_int_cst (ssizetype, 0);
9255 else
9256 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9258 if (code != EQ_EXPR
9259 && code != NE_EXPR
9260 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9261 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9262 fold_overflow_warning (("assuming pointer wraparound does not "
9263 "occur when comparing P +- C1 with "
9264 "P +- C2"),
9265 WARN_STRICT_OVERFLOW_COMPARISON);
9267 return fold_build2_loc (loc, code, type, offset0, offset1);
9270 /* For non-equal bases we can simplify if they are addresses
9271 of local binding decls or constants. */
9272 else if (indirect_base0 && indirect_base1
9273 /* We know that !operand_equal_p (base0, base1, 0)
9274 because the if condition was false. But make
9275 sure two decls are not the same. */
9276 && base0 != base1
9277 && TREE_CODE (arg0) == ADDR_EXPR
9278 && TREE_CODE (arg1) == ADDR_EXPR
9279 && (((TREE_CODE (base0) == VAR_DECL
9280 || TREE_CODE (base0) == PARM_DECL)
9281 && (targetm.binds_local_p (base0)
9282 || CONSTANT_CLASS_P (base1)))
9283 || CONSTANT_CLASS_P (base0))
9284 && (((TREE_CODE (base1) == VAR_DECL
9285 || TREE_CODE (base1) == PARM_DECL)
9286 && (targetm.binds_local_p (base1)
9287 || CONSTANT_CLASS_P (base0)))
9288 || CONSTANT_CLASS_P (base1)))
9290 if (code == EQ_EXPR)
9291 return omit_two_operands_loc (loc, type, boolean_false_node,
9292 arg0, arg1);
9293 else if (code == NE_EXPR)
9294 return omit_two_operands_loc (loc, type, boolean_true_node,
9295 arg0, arg1);
9297 /* For equal offsets we can simplify to a comparison of the
9298 base addresses. */
9299 else if (bitpos0 == bitpos1
9300 && (indirect_base0
9301 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9302 && (indirect_base1
9303 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9304 && ((offset0 == offset1)
9305 || (offset0 && offset1
9306 && operand_equal_p (offset0, offset1, 0))))
9308 if (indirect_base0)
9309 base0 = build_fold_addr_expr_loc (loc, base0);
9310 if (indirect_base1)
9311 base1 = build_fold_addr_expr_loc (loc, base1);
9312 return fold_build2_loc (loc, code, type, base0, base1);
9316 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9317 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9318 the resulting offset is smaller in absolute value than the
9319 original one. */
9320 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9321 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9322 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9323 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9324 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9325 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9326 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9328 tree const1 = TREE_OPERAND (arg0, 1);
9329 tree const2 = TREE_OPERAND (arg1, 1);
9330 tree variable1 = TREE_OPERAND (arg0, 0);
9331 tree variable2 = TREE_OPERAND (arg1, 0);
9332 tree cst;
9333 const char * const warnmsg = G_("assuming signed overflow does not "
9334 "occur when combining constants around "
9335 "a comparison");
9337 /* Put the constant on the side where it doesn't overflow and is
9338 of lower absolute value than before. */
9339 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9340 ? MINUS_EXPR : PLUS_EXPR,
9341 const2, const1);
9342 if (!TREE_OVERFLOW (cst)
9343 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9345 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9346 return fold_build2_loc (loc, code, type,
9347 variable1,
9348 fold_build2_loc (loc,
9349 TREE_CODE (arg1), TREE_TYPE (arg1),
9350 variable2, cst));
9353 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9354 ? MINUS_EXPR : PLUS_EXPR,
9355 const1, const2);
9356 if (!TREE_OVERFLOW (cst)
9357 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9359 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9360 return fold_build2_loc (loc, code, type,
9361 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9362 variable1, cst),
9363 variable2);
9367 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9368 signed arithmetic case. That form is created by the compiler
9369 often enough for folding it to be of value. One example is in
9370 computing loop trip counts after Operator Strength Reduction. */
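/* For example, X * 4 > 0 folds to X > 0, and X * -2 > 0 folds to
   X < 0 (the comparison sense is swapped for a negative constant),
   assuming signed overflow is undefined. */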
9371 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9372 && TREE_CODE (arg0) == MULT_EXPR
9373 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9374 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9375 && integer_zerop (arg1))
9377 tree const1 = TREE_OPERAND (arg0, 1);
9378 tree const2 = arg1; /* zero */
9379 tree variable1 = TREE_OPERAND (arg0, 0);
9380 enum tree_code cmp_code = code;
9382 /* Handle unfolded multiplication by zero. */
9383 if (integer_zerop (const1))
9384 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9386 fold_overflow_warning (("assuming signed overflow does not occur when "
9387 "eliminating multiplication in comparison "
9388 "with zero"),
9389 WARN_STRICT_OVERFLOW_COMPARISON);
9391 /* If const1 is negative we swap the sense of the comparison. */
9392 if (tree_int_cst_sgn (const1) < 0)
9393 cmp_code = swap_tree_comparison (cmp_code);
9395 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9398 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9399 if (tem)
9400 return tem;
9402 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9404 tree targ0 = strip_float_extensions (arg0);
9405 tree targ1 = strip_float_extensions (arg1);
9406 tree newtype = TREE_TYPE (targ0);
9408 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9409 newtype = TREE_TYPE (targ1);
9411 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9412 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9413 return fold_build2_loc (loc, code, type,
9414 fold_convert_loc (loc, newtype, targ0),
9415 fold_convert_loc (loc, newtype, targ1));
9417 /* (-a) CMP (-b) -> b CMP a */
9418 if (TREE_CODE (arg0) == NEGATE_EXPR
9419 && TREE_CODE (arg1) == NEGATE_EXPR)
9420 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9421 TREE_OPERAND (arg0, 0));
9423 if (TREE_CODE (arg1) == REAL_CST)
9425 REAL_VALUE_TYPE cst;
9426 cst = TREE_REAL_CST (arg1);
9428 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9429 if (TREE_CODE (arg0) == NEGATE_EXPR)
9430 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9431 TREE_OPERAND (arg0, 0),
9432 build_real (TREE_TYPE (arg1),
9433 real_value_negate (&cst)));
9435 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9436 /* a CMP (-0) -> a CMP 0 */
9437 if (REAL_VALUE_MINUS_ZERO (cst))
9438 return fold_build2_loc (loc, code, type, arg0,
9439 build_real (TREE_TYPE (arg1), dconst0));
9441 /* x != NaN is always true, other ops are always false. */
9442 if (REAL_VALUE_ISNAN (cst)
9443 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9445 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9446 return omit_one_operand_loc (loc, type, tem, arg0);
9449 /* Fold comparisons against infinity. */
9450 if (REAL_VALUE_ISINF (cst)
9451 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9453 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9454 if (tem != NULL_TREE)
9455 return tem;
9459 /* If this is a comparison of a real constant with a PLUS_EXPR
9460 or a MINUS_EXPR of a real constant, we can convert it into a
9461 comparison with a revised real constant as long as no overflow
9462 occurs when unsafe_math_optimizations are enabled. */
9463 if (flag_unsafe_math_optimizations
9464 && TREE_CODE (arg1) == REAL_CST
9465 && (TREE_CODE (arg0) == PLUS_EXPR
9466 || TREE_CODE (arg0) == MINUS_EXPR)
9467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9468 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9469 ? MINUS_EXPR : PLUS_EXPR,
9470 arg1, TREE_OPERAND (arg0, 1)))
9471 && !TREE_OVERFLOW (tem))
9472 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9474 /* Likewise, we can simplify a comparison of a real constant with
9475 a MINUS_EXPR whose first operand is also a real constant, i.e.
9476 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9477 floating-point types only if -fassociative-math is set. */
9478 if (flag_associative_math
9479 && TREE_CODE (arg1) == REAL_CST
9480 && TREE_CODE (arg0) == MINUS_EXPR
9481 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9482 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9483 arg1))
9484 && !TREE_OVERFLOW (tem))
9485 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9486 TREE_OPERAND (arg0, 1), tem);
9488 /* Fold comparisons against built-in math functions. */
9489 if (TREE_CODE (arg1) == REAL_CST
9490 && flag_unsafe_math_optimizations
9491 && ! flag_errno_math)
9493 enum built_in_function fcode = builtin_mathfn_code (arg0);
9495 if (fcode != END_BUILTINS)
9497 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9498 if (tem != NULL_TREE)
9499 return tem;
9504 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9505 && CONVERT_EXPR_P (arg0))
9507 /* If we are widening one operand of an integer comparison,
9508 see if the other operand is similarly being widened. Perhaps we
9509 can do the comparison in the narrower type. */
9510 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9511 if (tem)
9512 return tem;
9514 /* Or if we are changing signedness. */
9515 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9516 if (tem)
9517 return tem;
9520 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9521 constant, we can simplify it. */
9522 if (TREE_CODE (arg1) == INTEGER_CST
9523 && (TREE_CODE (arg0) == MIN_EXPR
9524 || TREE_CODE (arg0) == MAX_EXPR)
9525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9527 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9528 if (tem)
9529 return tem;
9532 /* Simplify comparison of something with itself. (For IEEE
9533 floating-point, we can only do some of these simplifications.) */
9534 if (operand_equal_p (arg0, arg1, 0))
9536 switch (code)
9538 case EQ_EXPR:
9539 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9540 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9541 return constant_boolean_node (1, type);
9542 break;
9544 case GE_EXPR:
9545 case LE_EXPR:
9546 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9547 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9548 return constant_boolean_node (1, type);
9549 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9551 case NE_EXPR:
9552 /* For NE, we can only do this simplification if integer
9553 or we don't honor IEEE floating point NaNs. */
9554 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9555 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9556 break;
9557 /* ... fall through ... */
9558 case GT_EXPR:
9559 case LT_EXPR:
9560 return constant_boolean_node (0, type);
9561 default:
9562 gcc_unreachable ();
9566 /* If we are comparing an expression that just has comparisons
9567 of two integer values, arithmetic expressions of those comparisons,
9568 and constants, we can simplify it. There are only three cases
9569 to check: the two values can either be equal, the first can be
9570 greater, or the second can be greater. Fold the expression for
9571 those three values. Since each value must be 0 or 1, we have
9572 eight possibilities, each of which corresponds to the constant 0
9573 or 1 or one of the six possible comparisons.
9575 This handles common cases like (a > b) == 0 but also handles
9576 expressions like ((x > y) - (y > x)) > 0, which supposedly
9577 occur in macroized code. */
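/* For example, for (a > b) == 0 the three evaluations are
   (1 == 0) = 0, (0 == 0) = 1 and (0 == 0) = 1, i.e. the mask 011,
   which selects LE_EXPR: the whole expression folds to a <= b. */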
9579 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9581 tree cval1 = 0, cval2 = 0;
9582 int save_p = 0;
9584 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9585 /* Don't handle degenerate cases here; they should already
9586 have been handled anyway. */
9587 && cval1 != 0 && cval2 != 0
9588 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9589 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9590 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9591 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9592 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9593 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9594 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9596 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9597 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9599 /* We can't just pass T to eval_subst in case cval1 or cval2
9600 was the same as ARG1. */
9602 tree high_result
9603 = fold_build2_loc (loc, code, type,
9604 eval_subst (loc, arg0, cval1, maxval,
9605 cval2, minval),
9606 arg1);
9607 tree equal_result
9608 = fold_build2_loc (loc, code, type,
9609 eval_subst (loc, arg0, cval1, maxval,
9610 cval2, maxval),
9611 arg1);
9612 tree low_result
9613 = fold_build2_loc (loc, code, type,
9614 eval_subst (loc, arg0, cval1, minval,
9615 cval2, maxval),
9616 arg1);
9618 /* All three of these results should be 0 or 1. Confirm they are.
9619 Then use those values to select the proper code to use. */
9621 if (TREE_CODE (high_result) == INTEGER_CST
9622 && TREE_CODE (equal_result) == INTEGER_CST
9623 && TREE_CODE (low_result) == INTEGER_CST)
9625 /* Make a 3-bit mask with the high-order bit being the
9626 value for `>', the next for `=', and the low for `<'. */
9627 switch ((integer_onep (high_result) * 4)
9628 + (integer_onep (equal_result) * 2)
9629 + integer_onep (low_result))
9631 case 0:
9632 /* Always false. */
9633 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9634 case 1:
9635 code = LT_EXPR;
9636 break;
9637 case 2:
9638 code = EQ_EXPR;
9639 break;
9640 case 3:
9641 code = LE_EXPR;
9642 break;
9643 case 4:
9644 code = GT_EXPR;
9645 break;
9646 case 5:
9647 code = NE_EXPR;
9648 break;
9649 case 6:
9650 code = GE_EXPR;
9651 break;
9652 case 7:
9653 /* Always true. */
9654 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9657 if (save_p)
9659 tem = save_expr (build2 (code, type, cval1, cval2));
9660 SET_EXPR_LOCATION (tem, loc);
9661 return tem;
9663 return fold_build2_loc (loc, code, type, cval1, cval2);
9668 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9669 into a single range test. */
9670 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9671 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9672 && TREE_CODE (arg1) == INTEGER_CST
9673 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9674 && !integer_zerop (TREE_OPERAND (arg0, 1))
9675 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9676 && !TREE_OVERFLOW (arg1))
9678 tem = fold_div_compare (loc, code, type, arg0, arg1);
9679 if (tem != NULL_TREE)
9680 return tem;
9683 /* Fold ~X op ~Y as Y op X. */
9684 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9685 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9687 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9688 return fold_build2_loc (loc, code, type,
9689 fold_convert_loc (loc, cmp_type,
9690 TREE_OPERAND (arg1, 0)),
9691 TREE_OPERAND (arg0, 0));
9694 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9695 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9696 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9698 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9699 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9700 TREE_OPERAND (arg0, 0),
9701 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9702 fold_convert_loc (loc, cmp_type, arg1)));
9705 return NULL_TREE;
9709 /* Subroutine of fold_binary. Optimize complex multiplications of the
9710 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9711 argument EXPR represents the expression "z" of type TYPE. */
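/* Writing z as a + bi, z * conj(z) = (a + bi)(a - bi) = a*a + b*b,
   so the result has zero imaginary part. */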
9713 static tree
9714 fold_mult_zconjz (location_t loc, tree type, tree expr)
9716 tree itype = TREE_TYPE (type);
9717 tree rpart, ipart, tem;
9719 if (TREE_CODE (expr) == COMPLEX_EXPR)
9721 rpart = TREE_OPERAND (expr, 0);
9722 ipart = TREE_OPERAND (expr, 1);
9724 else if (TREE_CODE (expr) == COMPLEX_CST)
9726 rpart = TREE_REALPART (expr);
9727 ipart = TREE_IMAGPART (expr);
9729 else
9731 expr = save_expr (expr);
9732 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9733 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9736 rpart = save_expr (rpart);
9737 ipart = save_expr (ipart);
9738 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9739 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9740 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9741 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9742 build_zero_cst (itype));
9746 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9747 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9748 guarantees that P and N have the same least significant log2(M) bits.
9749 N is not otherwise constrained. In particular, N is not normalized to
9750 0 <= N < M as is common. In general, the precise value of P is unknown.
9751 M is chosen as large as possible such that constant N can be determined.
9753 Returns M and sets *RESIDUE to N.
9755 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9756 account. This is not always possible due to PR 35705. */
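/* For instance, if EXPR is &x + 4 and x is known to be 8-byte
   aligned, the result is M == 8 with *RESIDUE == 4: the pointer value
   is congruent to 4 modulo 8. */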
9759 static unsigned HOST_WIDE_INT
9760 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9761 bool allow_func_align)
9763 enum tree_code code;
9765 *residue = 0;
9767 code = TREE_CODE (expr);
9768 if (code == ADDR_EXPR)
9770 unsigned int bitalign;
9771 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9772 *residue /= BITS_PER_UNIT;
9773 return bitalign / BITS_PER_UNIT;
9775 else if (code == POINTER_PLUS_EXPR)
9777 tree op0, op1;
9778 unsigned HOST_WIDE_INT modulus;
9779 enum tree_code inner_code;
9781 op0 = TREE_OPERAND (expr, 0);
9782 STRIP_NOPS (op0);
9783 modulus = get_pointer_modulus_and_residue (op0, residue,
9784 allow_func_align);
9786 op1 = TREE_OPERAND (expr, 1);
9787 STRIP_NOPS (op1);
9788 inner_code = TREE_CODE (op1);
9789 if (inner_code == INTEGER_CST)
9791 *residue += TREE_INT_CST_LOW (op1);
9792 return modulus;
9794 else if (inner_code == MULT_EXPR)
9796 op1 = TREE_OPERAND (op1, 1);
9797 if (TREE_CODE (op1) == INTEGER_CST)
9799 unsigned HOST_WIDE_INT align;
9801 /* Compute the greatest power-of-2 divisor of op1. */
9802 align = TREE_INT_CST_LOW (op1);
9803 align &= -align;
9805 /* If align is non-zero and less than *modulus, replace
9806 *modulus with align. If align is 0, then either op1 is 0
9807 or the greatest power-of-2 divisor of op1 doesn't fit in an
9808 unsigned HOST_WIDE_INT. In either case, no additional
9809 constraint is imposed. */
9810 if (align)
9811 modulus = MIN (modulus, align);
9813 return modulus;
9818 /* If we get here, we were unable to determine anything useful about the
9819 expression. */
9820 return 1;
9823 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9824 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9826 static bool
9827 vec_cst_ctor_to_array (tree arg, tree *elts)
9829 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9831 if (TREE_CODE (arg) == VECTOR_CST)
9833 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9834 elts[i] = VECTOR_CST_ELT (arg, i);
9836 else if (TREE_CODE (arg) == CONSTRUCTOR)
9838 constructor_elt *elt;
9840 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9841 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9842 return false;
9843 else
9844 elts[i] = elt->value;
9846 else
9847 return false;
9848 for (; i < nelts; i++)
9849 elts[i]
9850 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9851 return true;
9854 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9855 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9856 NULL_TREE otherwise. */
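/* For example, with four elements per vector, ARG0 = {1,2,3,4},
   ARG1 = {5,6,7,8} and SEL = {0,5,2,7}, the selector indexes the
   concatenation of both inputs and the result is {1,6,3,8}. */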
9858 static tree
9859 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9861 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9862 tree *elts;
9863 bool need_ctor = false;
9865 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9866 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9867 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9868 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9869 return NULL_TREE;
9871 elts = XALLOCAVEC (tree, nelts * 3);
9872 if (!vec_cst_ctor_to_array (arg0, elts)
9873 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9874 return NULL_TREE;
9876 for (i = 0; i < nelts; i++)
9878 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9879 need_ctor = true;
9880 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9883 if (need_ctor)
9885 vec<constructor_elt, va_gc> *v;
9886 vec_alloc (v, nelts);
9887 for (i = 0; i < nelts; i++)
9888 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9889 return build_constructor (type, v);
9891 else
9892 return build_vector (type, &elts[2 * nelts]);
9895 /* Try to fold a pointer difference of type TYPE two address expressions of
9896 array references AREF0 and AREF1 using location LOC. Return a
9897 simplified expression for the difference or NULL_TREE. */
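/* For example, &a[i] - &a[j] simplifies to (i - j) * sizeof (a[0]),
   computed in TYPE. */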
9899 static tree
9900 fold_addr_of_array_ref_difference (location_t loc, tree type,
9901 tree aref0, tree aref1)
9903 tree base0 = TREE_OPERAND (aref0, 0);
9904 tree base1 = TREE_OPERAND (aref1, 0);
9905 tree base_offset = build_int_cst (type, 0);
9907 /* If the bases are array references as well, recurse. If the bases
9908 are pointer indirections, compute the difference of the pointers.
9909 If the bases are equal, we are set. */
9910 if ((TREE_CODE (base0) == ARRAY_REF
9911 && TREE_CODE (base1) == ARRAY_REF
9912 && (base_offset
9913 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9914 || (INDIRECT_REF_P (base0)
9915 && INDIRECT_REF_P (base1)
9916 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9917 TREE_OPERAND (base0, 0),
9918 TREE_OPERAND (base1, 0))))
9919 || operand_equal_p (base0, base1, 0))
9921 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9922 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9923 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9924 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9925 return fold_build2_loc (loc, PLUS_EXPR, type,
9926 base_offset,
9927 fold_build2_loc (loc, MULT_EXPR, type,
9928 diff, esz));
9930 return NULL_TREE;
9933 /* If the real or vector real constant CST of type TYPE has an exact
9934 inverse, return it, else return NULL. */
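/* For example, 4.0 has the exact inverse 0.25, while 3.0 has none,
   since 1/3 is not representable in binary floating point. */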
9936 static tree
9937 exact_inverse (tree type, tree cst)
9939 REAL_VALUE_TYPE r;
9940 tree unit_type, *elts;
9941 enum machine_mode mode;
9942 unsigned vec_nelts, i;
9944 switch (TREE_CODE (cst))
9946 case REAL_CST:
9947 r = TREE_REAL_CST (cst);
9949 if (exact_real_inverse (TYPE_MODE (type), &r))
9950 return build_real (type, r);
9952 return NULL_TREE;
9954 case VECTOR_CST:
9955 vec_nelts = VECTOR_CST_NELTS (cst);
9956 elts = XALLOCAVEC (tree, vec_nelts);
9957 unit_type = TREE_TYPE (type);
9958 mode = TYPE_MODE (unit_type);
9960 for (i = 0; i < vec_nelts; i++)
9962 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9963 if (!exact_real_inverse (mode, &r))
9964 return NULL_TREE;
9965 elts[i] = build_real (unit_type, r);
9968 return build_vector (type, elts);
9970 default:
9971 return NULL_TREE;
9975 /* Mask out the tz least significant bits of X of type TYPE where
9976 tz is the number of trailing zeroes in Y. */
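/* For example, if Y is 8 (three trailing zeros), the result is
   X & ~7, i.e. X with its low three bits cleared. */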
9977 static double_int
9978 mask_with_tz (tree type, double_int x, double_int y)
9980 int tz = y.trailing_zeros ();
9982 if (tz > 0)
9984 double_int mask;
9986 mask = ~double_int::mask (tz);
9987 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9988 return mask & x;
9990 return x;
9993 /* Fold a binary expression of code CODE and type TYPE with operands
9994 OP0 and OP1. LOC is the location of the resulting expression.
9995 Return the folded expression if folding is successful. Otherwise,
9996 return NULL_TREE. */
9998 tree
9999 fold_binary_loc (location_t loc,
10000 enum tree_code code, tree type, tree op0, tree op1)
10002 enum tree_code_class kind = TREE_CODE_CLASS (code);
10003 tree arg0, arg1, tem;
10004 tree t1 = NULL_TREE;
10005 bool strict_overflow_p;
10006 unsigned int prec;
10008 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10009 && TREE_CODE_LENGTH (code) == 2
10010 && op0 != NULL_TREE
10011 && op1 != NULL_TREE);
10013 arg0 = op0;
10014 arg1 = op1;
10016 /* Strip any conversions that don't change the mode. This is
10017 safe for every expression, except for a comparison expression
10018 because its signedness is derived from its operands. So, in
10019 the latter case, only strip conversions that don't change the
10020 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10021 preserved.
10023 Note that this is done as an internal manipulation within the
10024 constant folder, in order to find the simplest representation
10025 of the arguments so that their form can be studied. In any
10026 case, the appropriate type conversions should be put back in
10027 the tree that will get out of the constant folder. */
10029 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10031 STRIP_SIGN_NOPS (arg0);
10032 STRIP_SIGN_NOPS (arg1);
10034 else
10036 STRIP_NOPS (arg0);
10037 STRIP_NOPS (arg1);
10040 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10041 constant but we can't do arithmetic on them. */
10042 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10043 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10044 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10045 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10046 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10047 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10048 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10050 if (kind == tcc_binary)
10052 /* Make sure type and arg0 have the same saturating flag. */
10053 gcc_assert (TYPE_SATURATING (type)
10054 == TYPE_SATURATING (TREE_TYPE (arg0)));
10055 tem = const_binop (code, arg0, arg1);
10057 else if (kind == tcc_comparison)
10058 tem = fold_relational_const (code, type, arg0, arg1);
10059 else
10060 tem = NULL_TREE;
10062 if (tem != NULL_TREE)
10064 if (TREE_TYPE (tem) != type)
10065 tem = fold_convert_loc (loc, type, tem);
10066 return tem;
10070 /* If this is a commutative operation, and ARG0 is a constant, move it
10071 to ARG1 to reduce the number of tests below. */
10072 if (commutative_tree_code (code)
10073 && tree_swap_operands_p (arg0, arg1, true))
10074 return fold_build2_loc (loc, code, type, op1, op0);
10076 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10078 First check for cases where an arithmetic operation is applied to a
10079 compound, conditional, or comparison operation. Push the arithmetic
10080 operation inside the compound or conditional to see if any folding
10081 can then be done. Convert comparison to conditional for this purpose.
10082 This also optimizes non-constant cases that used to be done in
10083 expand_expr.
10085 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10086 one of the operands is a comparison and the other is a comparison, a
10087 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10088 code below would make the expression more complex. Change it to a
10089 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10090 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10092 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10093 || code == EQ_EXPR || code == NE_EXPR)
10094 && TREE_CODE (type) != VECTOR_TYPE
10095 && ((truth_value_p (TREE_CODE (arg0))
10096 && (truth_value_p (TREE_CODE (arg1))
10097 || (TREE_CODE (arg1) == BIT_AND_EXPR
10098 && integer_onep (TREE_OPERAND (arg1, 1)))))
10099 || (truth_value_p (TREE_CODE (arg1))
10100 && (truth_value_p (TREE_CODE (arg0))
10101 || (TREE_CODE (arg0) == BIT_AND_EXPR
10102 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10104 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10105 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10106 : TRUTH_XOR_EXPR,
10107 boolean_type_node,
10108 fold_convert_loc (loc, boolean_type_node, arg0),
10109 fold_convert_loc (loc, boolean_type_node, arg1));
10111 if (code == EQ_EXPR)
10112 tem = invert_truthvalue_loc (loc, tem);
10114 return fold_convert_loc (loc, type, tem);
10117 if (TREE_CODE_CLASS (code) == tcc_binary
10118 || TREE_CODE_CLASS (code) == tcc_comparison)
10120 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10122 tem = fold_build2_loc (loc, code, type,
10123 fold_convert_loc (loc, TREE_TYPE (op0),
10124 TREE_OPERAND (arg0, 1)), op1);
10125 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10126 tem);
10128 if (TREE_CODE (arg1) == COMPOUND_EXPR
10129 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10131 tem = fold_build2_loc (loc, code, type, op0,
10132 fold_convert_loc (loc, TREE_TYPE (op1),
10133 TREE_OPERAND (arg1, 1)));
10134 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10135 tem);
10138 if (TREE_CODE (arg0) == COND_EXPR
10139 || TREE_CODE (arg0) == VEC_COND_EXPR
10140 || COMPARISON_CLASS_P (arg0))
10142 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10143 arg0, arg1,
10144 /*cond_first_p=*/1);
10145 if (tem != NULL_TREE)
10146 return tem;
10149 if (TREE_CODE (arg1) == COND_EXPR
10150 || TREE_CODE (arg1) == VEC_COND_EXPR
10151 || COMPARISON_CLASS_P (arg1))
10153 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10154 arg1, arg0,
10155 /*cond_first_p=*/0);
10156 if (tem != NULL_TREE)
10157 return tem;
10161 switch (code)
10163 case MEM_REF:
10164 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10165 if (TREE_CODE (arg0) == ADDR_EXPR
10166 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10168 tree iref = TREE_OPERAND (arg0, 0);
10169 return fold_build2 (MEM_REF, type,
10170 TREE_OPERAND (iref, 0),
10171 int_const_binop (PLUS_EXPR, arg1,
10172 TREE_OPERAND (iref, 1)));
10175 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10176 if (TREE_CODE (arg0) == ADDR_EXPR
10177 && handled_component_p (TREE_OPERAND (arg0, 0)))
10179 tree base;
10180 HOST_WIDE_INT coffset;
10181 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10182 &coffset);
10183 if (!base)
10184 return NULL_TREE;
10185 return fold_build2 (MEM_REF, type,
10186 build_fold_addr_expr (base),
10187 int_const_binop (PLUS_EXPR, arg1,
10188 size_int (coffset)));
10191 return NULL_TREE;
10193 case POINTER_PLUS_EXPR:
10194 /* 0 +p index -> (type)index */
10195 if (integer_zerop (arg0))
10196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10198 /* PTR +p 0 -> PTR */
10199 if (integer_zerop (arg1))
10200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10202 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10203 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10204 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10205 return fold_convert_loc (loc, type,
10206 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10207 fold_convert_loc (loc, sizetype,
10208 arg1),
10209 fold_convert_loc (loc, sizetype,
10210 arg0)));
10212 /* (PTR +p B) +p A -> PTR +p (B + A) */
10213 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10214 && !upc_shared_type_p (TREE_TYPE (type)))
10216 tree inner;
10217 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10218 tree arg00 = TREE_OPERAND (arg0, 0);
10219 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10220 arg01, fold_convert_loc (loc, sizetype, arg1));
10221 return fold_convert_loc (loc, type,
10222 fold_build_pointer_plus_loc (loc,
10223 arg00, inner));
10226 /* PTR_CST +p CST -> CST1 */
10227 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10228 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10229 fold_convert_loc (loc, type, arg1));
10231 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10232 of the array. The loop optimizer sometimes produces this type of
10233 expression. */
10234 if (TREE_CODE (arg0) == ADDR_EXPR)
10236 tem = try_move_mult_to_index (loc, arg0,
10237 fold_convert_loc (loc,
10238 ssizetype, arg1));
10239 if (tem)
10240 return fold_convert_loc (loc, type, tem);
10243 return NULL_TREE;
10245 case PLUS_EXPR:
10246 /* A + (-B) -> A - B */
10247 if (TREE_CODE (arg1) == NEGATE_EXPR)
10248 return fold_build2_loc (loc, MINUS_EXPR, type,
10249 fold_convert_loc (loc, type, arg0),
10250 fold_convert_loc (loc, type,
10251 TREE_OPERAND (arg1, 0)));
10252 /* (-A) + B -> B - A */
10253 if (TREE_CODE (arg0) == NEGATE_EXPR
10254 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10255 return fold_build2_loc (loc, MINUS_EXPR, type,
10256 fold_convert_loc (loc, type, arg1),
10257 fold_convert_loc (loc, type,
10258 TREE_OPERAND (arg0, 0)));
10260 /* Disable further optimizations involving UPC shared pointers,
10261 because integers are not interoperable with shared pointers. */
10262 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10263 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10264 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10265 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10266 return NULL_TREE;
10268 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10270 /* Convert ~A + 1 to -A. */
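/* (This is the two's-complement identity -A == ~A + 1.) */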
10271 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10272 && integer_onep (arg1))
10273 return fold_build1_loc (loc, NEGATE_EXPR, type,
10274 fold_convert_loc (loc, type,
10275 TREE_OPERAND (arg0, 0)));
10277 /* ~X + X is -1. */
10278 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10279 && !TYPE_OVERFLOW_TRAPS (type))
10281 tree tem = TREE_OPERAND (arg0, 0);
10283 STRIP_NOPS (tem);
10284 if (operand_equal_p (tem, arg1, 0))
10286 t1 = build_all_ones_cst (type);
10287 return omit_one_operand_loc (loc, type, t1, arg1);
10291 /* X + ~X is -1. */
10292 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10293 && !TYPE_OVERFLOW_TRAPS (type))
10295 tree tem = TREE_OPERAND (arg1, 0);
10297 STRIP_NOPS (tem);
10298 if (operand_equal_p (arg0, tem, 0))
10300 t1 = build_all_ones_cst (type);
10301 return omit_one_operand_loc (loc, type, t1, arg0);
10305 /* X + (X / CST) * -CST is X % CST. */
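/* (This holds because X % CST == X - (X / CST) * CST for the
   truncating division used here.) */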
10306 if (TREE_CODE (arg1) == MULT_EXPR
10307 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10308 && operand_equal_p (arg0,
10309 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10311 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10312 tree cst1 = TREE_OPERAND (arg1, 1);
10313 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10314 cst1, cst0);
10315 if (sum && integer_zerop (sum))
10316 return fold_convert_loc (loc, type,
10317 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10318 TREE_TYPE (arg0), arg0,
10319 cst0));
10323 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10324 one. Make sure the type is not saturating and has the signedness of
10325 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10326 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10327 if ((TREE_CODE (arg0) == MULT_EXPR
10328 || TREE_CODE (arg1) == MULT_EXPR)
10329 && !TYPE_SATURATING (type)
10330 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10331 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10332 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10334 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10335 if (tem)
10336 return tem;
10339 if (! FLOAT_TYPE_P (type))
10341 if (integer_zerop (arg1))
10342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10344 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10345 with a constant, and the two constants have no bits in common,
10346 we should treat this as a BIT_IOR_EXPR since this may produce more
10347 simplifications. */
10348 if (TREE_CODE (arg0) == BIT_AND_EXPR
10349 && TREE_CODE (arg1) == BIT_AND_EXPR
10350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10351 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10352 && integer_zerop (const_binop (BIT_AND_EXPR,
10353 TREE_OPERAND (arg0, 1),
10354 TREE_OPERAND (arg1, 1))))
10356 code = BIT_IOR_EXPR;
10357 goto bit_ior;
10360 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10361 (plus (plus (mult) (mult)) (foo)) so that we can
10362 take advantage of the factoring cases below. */
10363 if (TYPE_OVERFLOW_WRAPS (type)
10364 && (((TREE_CODE (arg0) == PLUS_EXPR
10365 || TREE_CODE (arg0) == MINUS_EXPR)
10366 && TREE_CODE (arg1) == MULT_EXPR)
10367 || ((TREE_CODE (arg1) == PLUS_EXPR
10368 || TREE_CODE (arg1) == MINUS_EXPR)
10369 && TREE_CODE (arg0) == MULT_EXPR)))
10371 tree parg0, parg1, parg, marg;
10372 enum tree_code pcode;
10374 if (TREE_CODE (arg1) == MULT_EXPR)
10375 parg = arg0, marg = arg1;
10376 else
10377 parg = arg1, marg = arg0;
10378 pcode = TREE_CODE (parg);
10379 parg0 = TREE_OPERAND (parg, 0);
10380 parg1 = TREE_OPERAND (parg, 1);
10381 STRIP_NOPS (parg0);
10382 STRIP_NOPS (parg1);
10384 if (TREE_CODE (parg0) == MULT_EXPR
10385 && TREE_CODE (parg1) != MULT_EXPR)
10386 return fold_build2_loc (loc, pcode, type,
10387 fold_build2_loc (loc, PLUS_EXPR, type,
10388 fold_convert_loc (loc, type,
10389 parg0),
10390 fold_convert_loc (loc, type,
10391 marg)),
10392 fold_convert_loc (loc, type, parg1));
10393 if (TREE_CODE (parg0) != MULT_EXPR
10394 && TREE_CODE (parg1) == MULT_EXPR)
10395 return
10396 fold_build2_loc (loc, PLUS_EXPR, type,
10397 fold_convert_loc (loc, type, parg0),
10398 fold_build2_loc (loc, pcode, type,
10399 fold_convert_loc (loc, type, marg),
10400 fold_convert_loc (loc, type,
10401 parg1)));
10404 else
10406 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10407 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10410 /* Likewise if the operands are reversed. */
10411 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10414 /* Convert X + -C into X - C. */
10415 if (TREE_CODE (arg1) == REAL_CST
10416 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10418 tem = fold_negate_const (arg1, type);
10419 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10420 return fold_build2_loc (loc, MINUS_EXPR, type,
10421 fold_convert_loc (loc, type, arg0),
10422 fold_convert_loc (loc, type, tem));
10425 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10426 to __complex__ ( x, y ). This is not the same for SNaNs or
10427 if signed zeros are involved. */
10428 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10429 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10430 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10432 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10433 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10434 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10435 bool arg0rz = false, arg0iz = false;
10436 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10437 || (arg0i && (arg0iz = real_zerop (arg0i))))
10439 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10440 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10441 if (arg0rz && arg1i && real_zerop (arg1i))
10443 tree rp = arg1r ? arg1r
10444 : build1 (REALPART_EXPR, rtype, arg1);
10445 tree ip = arg0i ? arg0i
10446 : build1 (IMAGPART_EXPR, rtype, arg0);
10447 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10449 else if (arg0iz && arg1r && real_zerop (arg1r))
10451 tree rp = arg0r ? arg0r
10452 : build1 (REALPART_EXPR, rtype, arg0);
10453 tree ip = arg1i ? arg1i
10454 : build1 (IMAGPART_EXPR, rtype, arg1);
10455 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10460 if (flag_unsafe_math_optimizations
10461 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10462 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10463 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10464 return tem;
10466 /* Convert x+x into x*2.0. */
10467 if (operand_equal_p (arg0, arg1, 0)
10468 && SCALAR_FLOAT_TYPE_P (type))
10469 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10470 build_real (type, dconst2));
10472 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10473 We associate floats only if the user has specified
10474 -fassociative-math. */
10475 if (flag_associative_math
10476 && TREE_CODE (arg1) == PLUS_EXPR
10477 && TREE_CODE (arg0) != MULT_EXPR)
10479 tree tree10 = TREE_OPERAND (arg1, 0);
10480 tree tree11 = TREE_OPERAND (arg1, 1);
10481 if (TREE_CODE (tree11) == MULT_EXPR
10482 && TREE_CODE (tree10) == MULT_EXPR)
10484 tree tree0;
10485 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10486 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10489 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10490 We associate floats only if the user has specified
10491 -fassociative-math. */
10492 if (flag_associative_math
10493 && TREE_CODE (arg0) == PLUS_EXPR
10494 && TREE_CODE (arg1) != MULT_EXPR)
10496 tree tree00 = TREE_OPERAND (arg0, 0);
10497 tree tree01 = TREE_OPERAND (arg0, 1);
10498 if (TREE_CODE (tree01) == MULT_EXPR
10499 && TREE_CODE (tree00) == MULT_EXPR)
10501 tree tree0;
10502 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10503 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10508 bit_rotate:
10509 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10510 is a rotate of A by C1 bits. */
10511 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10512 is a rotate of A by B bits. */
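/* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29) is a
   rotate left by 3, since the shift counts sum to the precision. */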
10514 enum tree_code code0, code1;
10515 tree rtype;
10516 code0 = TREE_CODE (arg0);
10517 code1 = TREE_CODE (arg1);
10518 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10519 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10520 && operand_equal_p (TREE_OPERAND (arg0, 0),
10521 TREE_OPERAND (arg1, 0), 0)
10522 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10523 TYPE_UNSIGNED (rtype))
10524 /* Only create rotates in complete modes. Other cases are not
10525 expanded properly. */
10526 && (element_precision (rtype)
10527 == element_precision (TYPE_MODE (rtype))))
10529 tree tree01, tree11;
10530 enum tree_code code01, code11;
10532 tree01 = TREE_OPERAND (arg0, 1);
10533 tree11 = TREE_OPERAND (arg1, 1);
10534 STRIP_NOPS (tree01);
10535 STRIP_NOPS (tree11);
10536 code01 = TREE_CODE (tree01);
10537 code11 = TREE_CODE (tree11);
10538 if (code01 == INTEGER_CST
10539 && code11 == INTEGER_CST
10540 && TREE_INT_CST_HIGH (tree01) == 0
10541 && TREE_INT_CST_HIGH (tree11) == 0
10542 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10543 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10545 tem = build2_loc (loc, LROTATE_EXPR,
10546 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10547 TREE_OPERAND (arg0, 0),
10548 code0 == LSHIFT_EXPR ? tree01 : tree11);
10549 return fold_convert_loc (loc, type, tem);
10551 else if (code11 == MINUS_EXPR)
10553 tree tree110, tree111;
10554 tree110 = TREE_OPERAND (tree11, 0);
10555 tree111 = TREE_OPERAND (tree11, 1);
10556 STRIP_NOPS (tree110);
10557 STRIP_NOPS (tree111);
10558 if (TREE_CODE (tree110) == INTEGER_CST
10559 && 0 == compare_tree_int (tree110,
10560 element_precision
10561 (TREE_TYPE (TREE_OPERAND
10562 (arg0, 0))))
10563 && operand_equal_p (tree01, tree111, 0))
10564 return
10565 fold_convert_loc (loc, type,
10566 build2 ((code0 == LSHIFT_EXPR
10567 ? LROTATE_EXPR
10568 : RROTATE_EXPR),
10569 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10570 TREE_OPERAND (arg0, 0), tree01));
10572 else if (code01 == MINUS_EXPR)
10574 tree tree010, tree011;
10575 tree010 = TREE_OPERAND (tree01, 0);
10576 tree011 = TREE_OPERAND (tree01, 1);
10577 STRIP_NOPS (tree010);
10578 STRIP_NOPS (tree011);
10579 if (TREE_CODE (tree010) == INTEGER_CST
10580 && 0 == compare_tree_int (tree010,
10581 element_precision
10582 (TREE_TYPE (TREE_OPERAND
10583 (arg0, 0))))
10584 && operand_equal_p (tree11, tree011, 0))
10585 return fold_convert_loc
10586 (loc, type,
10587 build2 ((code0 != LSHIFT_EXPR
10588 ? LROTATE_EXPR
10589 : RROTATE_EXPR),
10590 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10591 TREE_OPERAND (arg0, 0), tree11));
10596 associate:
10597 /* In most languages, we can't associate operations on floats through
10598 parentheses. Rather than remember where the parentheses were, we
10599 don't associate floats at all, unless the user has specified
10600 -fassociative-math.
10601 And, we need to make sure the type is not saturating. */
10603 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10604 && !TYPE_SATURATING (type))
10606 tree var0, con0, lit0, minus_lit0;
10607 tree var1, con1, lit1, minus_lit1;
10608 tree atype = type;
10609 bool ok = true;
10611 /* Split both trees into variables, constants, and literals. Then
10612 associate each group together, the constants with literals,
10613 then the result with variables. This increases the chances of
10614 literals being recombined later and of generating relocatable
10615 expressions for the sum of a constant and literal. */
10616 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10617 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10618 code == MINUS_EXPR);
10620 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10621 if (code == MINUS_EXPR)
10622 code = PLUS_EXPR;
10624 /* With undefined overflow prefer doing association in a type
10625 which wraps on overflow, if that is one of the operand types. */
10626 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10627 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10629 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10630 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10631 atype = TREE_TYPE (arg0);
10632 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10633 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10634 atype = TREE_TYPE (arg1);
10635 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10638 /* With undefined overflow we can only associate constants with one
10639 variable, and constants whose association doesn't overflow. */
10640 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10641 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10643 if (var0 && var1)
10645 tree tmp0 = var0;
10646 tree tmp1 = var1;
10648 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10649 tmp0 = TREE_OPERAND (tmp0, 0);
10650 if (CONVERT_EXPR_P (tmp0)
10651 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10652 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10653 <= TYPE_PRECISION (atype)))
10654 tmp0 = TREE_OPERAND (tmp0, 0);
10655 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10656 tmp1 = TREE_OPERAND (tmp1, 0);
10657 if (CONVERT_EXPR_P (tmp1)
10658 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10659 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10660 <= TYPE_PRECISION (atype)))
10661 tmp1 = TREE_OPERAND (tmp1, 0);
10662 /* The only case we can still associate with two variables
10663 is if they are the same, modulo negation and bit-pattern
10664 preserving conversions. */
10665 if (!operand_equal_p (tmp0, tmp1, 0))
10666 ok = false;
10670 /* Only do something if we found more than two objects. Otherwise,
10671 nothing has changed and we risk infinite recursion. */
10672 if (ok
10673 && (2 < ((var0 != 0) + (var1 != 0)
10674 + (con0 != 0) + (con1 != 0)
10675 + (lit0 != 0) + (lit1 != 0)
10676 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10678 bool any_overflows = false;
10679 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10680 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10681 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10682 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10683 var0 = associate_trees (loc, var0, var1, code, atype);
10684 con0 = associate_trees (loc, con0, con1, code, atype);
10685 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10686 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10687 code, atype);
10689 /* Preserve the MINUS_EXPR if the negative part of the literal is
10690 greater than the positive part. Otherwise, the multiplicative
10691 folding code (i.e. extract_muldiv) may be fooled when
10692 unsigned constants are subtracted, as in the following
10693 example: ((X*2 + 4) - 8U)/2. */
10694 if (minus_lit0 && lit0)
10696 if (TREE_CODE (lit0) == INTEGER_CST
10697 && TREE_CODE (minus_lit0) == INTEGER_CST
10698 && tree_int_cst_lt (lit0, minus_lit0))
10700 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10701 MINUS_EXPR, atype);
10702 lit0 = 0;
10704 else
10706 lit0 = associate_trees (loc, lit0, minus_lit0,
10707 MINUS_EXPR, atype);
10708 minus_lit0 = 0;
10712 /* Don't introduce overflows through reassociation. */
10713 if (!any_overflows
10714 && ((lit0 && TREE_OVERFLOW (lit0))
10715 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10716 return NULL_TREE;
10718 if (minus_lit0)
10720 if (con0 == 0)
10721 return
10722 fold_convert_loc (loc, type,
10723 associate_trees (loc, var0, minus_lit0,
10724 MINUS_EXPR, atype));
10725 else
10727 con0 = associate_trees (loc, con0, minus_lit0,
10728 MINUS_EXPR, atype);
10729 return
10730 fold_convert_loc (loc, type,
10731 associate_trees (loc, var0, con0,
10732 PLUS_EXPR, atype));
10736 con0 = associate_trees (loc, con0, lit0, code, atype);
10737 return
10738 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10739 code, atype));
10743 return NULL_TREE;
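      /* A worked instance of the association above (hypothetical
         values): for a signed int i, (i + 1) + 2 splits into the
         variable i and the literals 1 and 2; associate_trees then
         recombines the literals, yielding i + 3.  */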
10745 case MINUS_EXPR:
10746 /* Pointer simplifications for subtraction, simple reassociations. */
10747 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10749 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10750 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10751 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10753 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10754 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10755 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10756 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10757 return fold_build2_loc (loc, PLUS_EXPR, type,
10758 fold_build2_loc (loc, MINUS_EXPR, type,
10759 arg00, arg10),
10760 fold_build2_loc (loc, MINUS_EXPR, type,
10761 arg01, arg11));
10763 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10764 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10766 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10767 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10768 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10769 fold_convert_loc (loc, type, arg1));
10770 if (tmp)
10771 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10774 /* A - (-B) -> A + B */
10775 if (TREE_CODE (arg1) == NEGATE_EXPR)
10776 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10777 fold_convert_loc (loc, type,
10778 TREE_OPERAND (arg1, 0)));
10780 /* Disable further optimizations involving UPC shared pointers,
10781 because integers are not interoperable with shared pointers.
10782 (The test below also detects pointer difference between
10783 shared pointers, which cannot be folded.) */
10785 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10786 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10787 return NULL_TREE;
10789 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10790 if (TREE_CODE (arg0) == NEGATE_EXPR
10791 && negate_expr_p (arg1)
10792 && reorder_operands_p (arg0, arg1))
10793 return fold_build2_loc (loc, MINUS_EXPR, type,
10794 fold_convert_loc (loc, type,
10795 negate_expr (arg1)),
10796 fold_convert_loc (loc, type,
10797 TREE_OPERAND (arg0, 0)));
10798 /* Convert -A - 1 to ~A. */
10799 if (TREE_CODE (type) != COMPLEX_TYPE
10800 && TREE_CODE (arg0) == NEGATE_EXPR
10801 && integer_onep (arg1)
10802 && !TYPE_OVERFLOW_TRAPS (type))
10803 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10804 fold_convert_loc (loc, type,
10805 TREE_OPERAND (arg0, 0)));
10807 /* Convert -1 - A to ~A. */
10808 if (TREE_CODE (type) != COMPLEX_TYPE
10809 && integer_all_onesp (arg0))
10810 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10813 /* X - (X / Y) * Y is X % Y. */
10814 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10815 && TREE_CODE (arg1) == MULT_EXPR
10816 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10817 && operand_equal_p (arg0,
10818 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10819 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10820 TREE_OPERAND (arg1, 1), 0))
10821 return
10822 fold_convert_loc (loc, type,
10823 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10824 arg0, TREE_OPERAND (arg1, 1)));
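      /* Worked instance of the fold above, assuming truncating
         division (hypothetical values): with X = 7 and Y = 3,
         X - (X / Y) * Y = 7 - 2*3 = 1, which is exactly X % Y.  */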
10826 if (! FLOAT_TYPE_P (type))
10828 if (integer_zerop (arg0))
10829 return negate_expr (fold_convert_loc (loc, type, arg1));
10830 if (integer_zerop (arg1))
10831 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10833 /* Fold A - (A & B) into ~B & A. */
10834 if (!TREE_SIDE_EFFECTS (arg0)
10835 && TREE_CODE (arg1) == BIT_AND_EXPR)
10837 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10839 tree arg10 = fold_convert_loc (loc, type,
10840 TREE_OPERAND (arg1, 0));
10841 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10842 fold_build1_loc (loc, BIT_NOT_EXPR,
10843 type, arg10),
10844 fold_convert_loc (loc, type, arg0));
10846 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10848 tree arg11 = fold_convert_loc (loc,
10849 type, TREE_OPERAND (arg1, 1));
10850 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10851 fold_build1_loc (loc, BIT_NOT_EXPR,
10852 type, arg11),
10853 fold_convert_loc (loc, type, arg0));
10857 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10858 any power of 2 minus 1. */
10859 if (TREE_CODE (arg0) == BIT_AND_EXPR
10860 && TREE_CODE (arg1) == BIT_AND_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0, 0),
10862 TREE_OPERAND (arg1, 0), 0))
10864 tree mask0 = TREE_OPERAND (arg0, 1);
10865 tree mask1 = TREE_OPERAND (arg1, 1);
10866 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10868 if (operand_equal_p (tem, mask1, 0))
10870 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10871 TREE_OPERAND (arg0, 0), mask1);
10872 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
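          /* Worked instance (hypothetical values, B = 7 = 2^3 - 1):
             for A = 45 (0b101101),
               (A & ~7) - (A & 7) = 40 - 5 = 35, and
               (A ^ 7) - 7 = 42 - 7 = 35.  */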
10877 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10878 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10879 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10881 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10882 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10883 (-ARG1 + ARG0) reduces to -ARG1. */
10884 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10885 return negate_expr (fold_convert_loc (loc, type, arg1));
10887 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10888 __complex__ ( x, -y ). This is not the same for SNaNs or if
10889 signed zeros are involved. */
10890 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10891 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10892 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10894 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10895 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10896 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10897 bool arg0rz = false, arg0iz = false;
10898 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10899 || (arg0i && (arg0iz = real_zerop (arg0i))))
10901 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10902 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10903 if (arg0rz && arg1i && real_zerop (arg1i))
10905 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10906 arg1r ? arg1r
10907 : build1 (REALPART_EXPR, rtype, arg1));
10908 tree ip = arg0i ? arg0i
10909 : build1 (IMAGPART_EXPR, rtype, arg0);
10910 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10912 else if (arg0iz && arg1r && real_zerop (arg1r))
10914 tree rp = arg0r ? arg0r
10915 : build1 (REALPART_EXPR, rtype, arg0);
10916 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10917 arg1i ? arg1i
10918 : build1 (IMAGPART_EXPR, rtype, arg1));
10919 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10924 /* Fold &x - &x. This can happen from &x.foo - &x.
10925 This is unsafe for certain floats even in non-IEEE formats.
10926 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10927 Also note that operand_equal_p is always false if an operand
10928 is volatile. */
10930 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10931 && operand_equal_p (arg0, arg1, 0))
10932 return build_zero_cst (type);
10934 /* A - B -> A + (-B) if B is easily negatable. */
10935 if (negate_expr_p (arg1)
10936 && ((FLOAT_TYPE_P (type)
10937 /* Avoid this transformation if B is a positive REAL_CST. */
10938 && (TREE_CODE (arg1) != REAL_CST
10939 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10940 || INTEGRAL_TYPE_P (type)))
10941 return fold_build2_loc (loc, PLUS_EXPR, type,
10942 fold_convert_loc (loc, type, arg0),
10943 fold_convert_loc (loc, type,
10944 negate_expr (arg1)));
10946 /* Try folding difference of addresses. */
10948 HOST_WIDE_INT diff;
10950 if ((TREE_CODE (arg0) == ADDR_EXPR
10951 || TREE_CODE (arg1) == ADDR_EXPR)
10952 && ptr_difference_const (arg0, arg1, &diff))
10953 return build_int_cst_type (type, diff);
10956 /* Fold &a[i] - &a[j] to i-j. */
10957 if (TREE_CODE (arg0) == ADDR_EXPR
10958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10959 && TREE_CODE (arg1) == ADDR_EXPR
10960 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10962 tree tem = fold_addr_of_array_ref_difference (loc, type,
10963 TREE_OPERAND (arg0, 0),
10964 TREE_OPERAND (arg1, 0));
10965 if (tem)
10966 return tem;
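      /* At the C source level this matches the guarantee that, for an
         array a, &a[i] - &a[j] evaluates to i - j; a hypothetical
         sanity check:

           int a[10];
           assert (&a[7] - &a[2] == 5);  */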
10969 if (FLOAT_TYPE_P (type)
10970 && flag_unsafe_math_optimizations
10971 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10972 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10973 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10974 return tem;
10976 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10977 one. Make sure the type is not saturating and has the signedness of
10978 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10979 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10980 if ((TREE_CODE (arg0) == MULT_EXPR
10981 || TREE_CODE (arg1) == MULT_EXPR)
10982 && !TYPE_SATURATING (type)
10983 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10984 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10985 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10987 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10988 if (tem)
10989 return tem;
10992 goto associate;
10994 case MULT_EXPR:
10995 /* (-A) * (-B) -> A * B */
10996 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10997 return fold_build2_loc (loc, MULT_EXPR, type,
10998 fold_convert_loc (loc, type,
10999 TREE_OPERAND (arg0, 0)),
11000 fold_convert_loc (loc, type,
11001 negate_expr (arg1)));
11002 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11003 return fold_build2_loc (loc, MULT_EXPR, type,
11004 fold_convert_loc (loc, type,
11005 negate_expr (arg0)),
11006 fold_convert_loc (loc, type,
11007 TREE_OPERAND (arg1, 0)));
11009 if (! FLOAT_TYPE_P (type))
11011 if (integer_zerop (arg1))
11012 return omit_one_operand_loc (loc, type, arg1, arg0);
11013 if (integer_onep (arg1))
11014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11015 /* Transform x * -1 into -x. Make sure to do the negation
11016 on the original operand with conversions not stripped
11017 because we can only strip non-sign-changing conversions. */
11018 if (integer_minus_onep (arg1))
11019 return fold_convert_loc (loc, type, negate_expr (op0));
11020 /* Transform x * -C into -x * C if x is easily negatable. */
11021 if (TREE_CODE (arg1) == INTEGER_CST
11022 && tree_int_cst_sgn (arg1) == -1
11023 && negate_expr_p (arg0)
11024 && (tem = negate_expr (arg1)) != arg1
11025 && !TREE_OVERFLOW (tem))
11026 return fold_build2_loc (loc, MULT_EXPR, type,
11027 fold_convert_loc (loc, type,
11028 negate_expr (arg0)),
11029 tem);
11031 /* (a * (1 << b)) is (a << b) */
11032 if (TREE_CODE (arg1) == LSHIFT_EXPR
11033 && integer_onep (TREE_OPERAND (arg1, 0)))
11034 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11035 TREE_OPERAND (arg1, 1));
11036 if (TREE_CODE (arg0) == LSHIFT_EXPR
11037 && integer_onep (TREE_OPERAND (arg0, 0)))
11038 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11039 TREE_OPERAND (arg0, 1));
11041 /* (A + A) * C -> A * 2 * C */
11042 if (TREE_CODE (arg0) == PLUS_EXPR
11043 && TREE_CODE (arg1) == INTEGER_CST
11044 && operand_equal_p (TREE_OPERAND (arg0, 0),
11045 TREE_OPERAND (arg0, 1), 0))
11046 return fold_build2_loc (loc, MULT_EXPR, type,
11047 omit_one_operand_loc (loc, type,
11048 TREE_OPERAND (arg0, 0),
11049 TREE_OPERAND (arg0, 1)),
11050 fold_build2_loc (loc, MULT_EXPR, type,
11051 build_int_cst (type, 2) , arg1));
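      /* Worked instance of the fold above (hypothetical values):
         (a + a) * 3 becomes a * 2 * 3, which later constant folding
         reduces to a * 6.  */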
11053 strict_overflow_p = false;
11054 if (TREE_CODE (arg1) == INTEGER_CST
11055 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11056 &strict_overflow_p)))
11058 if (strict_overflow_p)
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when simplifying "
11061 "multiplication"),
11062 WARN_STRICT_OVERFLOW_MISC);
11063 return fold_convert_loc (loc, type, tem);
11066 /* Optimize z * conj(z) for integer complex numbers. */
11067 if (TREE_CODE (arg0) == CONJ_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11069 return fold_mult_zconjz (loc, type, arg1);
11070 if (TREE_CODE (arg1) == CONJ_EXPR
11071 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11072 return fold_mult_zconjz (loc, type, arg0);
11074 else
11076 /* Maybe fold x * 0 to 0. The expressions aren't the same
11077 when x is NaN, since x * 0 is also NaN. Nor are they the
11078 same in modes with signed zeros, since multiplying a
11079 negative value by 0 gives -0, not +0. */
11080 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11081 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11082 && real_zerop (arg1))
11083 return omit_one_operand_loc (loc, type, arg1, arg0);
11084 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11085 Likewise for complex arithmetic with signed zeros. */
11086 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11087 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11088 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11089 && real_onep (arg1))
11090 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11092 /* Transform x * -1.0 into -x. */
11093 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11094 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11095 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11096 && real_minus_onep (arg1))
11097 return fold_convert_loc (loc, type, negate_expr (arg0));
11099 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11100 the result for floating point types due to rounding, so it is applied
11101 only if -fassociative-math was specified. */
11102 if (flag_associative_math
11103 && TREE_CODE (arg0) == RDIV_EXPR
11104 && TREE_CODE (arg1) == REAL_CST
11105 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11107 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11108 arg1);
11109 if (tem)
11110 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11111 TREE_OPERAND (arg0, 1));
11114 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11115 if (operand_equal_p (arg0, arg1, 0))
11117 tree tem = fold_strip_sign_ops (arg0);
11118 if (tem != NULL_TREE)
11120 tem = fold_convert_loc (loc, type, tem);
11121 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11125 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11126 This is not the same for NaNs or if signed zeros are
11127 involved. */
11128 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11129 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11130 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11131 && TREE_CODE (arg1) == COMPLEX_CST
11132 && real_zerop (TREE_REALPART (arg1)))
11134 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11135 if (real_onep (TREE_IMAGPART (arg1)))
11136 return
11137 fold_build2_loc (loc, COMPLEX_EXPR, type,
11138 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11139 rtype, arg0)),
11140 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11141 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11142 return
11143 fold_build2_loc (loc, COMPLEX_EXPR, type,
11144 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11145 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11146 rtype, arg0)));
11149 /* Optimize z * conj(z) for floating point complex numbers.
11150 Guarded by flag_unsafe_math_optimizations as non-finite
11151 imaginary components don't produce scalar results. */
11152 if (flag_unsafe_math_optimizations
11153 && TREE_CODE (arg0) == CONJ_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11155 return fold_mult_zconjz (loc, type, arg1);
11156 if (flag_unsafe_math_optimizations
11157 && TREE_CODE (arg1) == CONJ_EXPR
11158 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11159 return fold_mult_zconjz (loc, type, arg0);
11161 if (flag_unsafe_math_optimizations)
11163 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11164 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11166 /* Optimizations of root(...)*root(...). */
11167 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11169 tree rootfn, arg;
11170 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11171 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11173 /* Optimize sqrt(x)*sqrt(x) as x. */
11174 if (BUILTIN_SQRT_P (fcode0)
11175 && operand_equal_p (arg00, arg10, 0)
11176 && ! HONOR_SNANS (TYPE_MODE (type)))
11177 return arg00;
11179 /* Optimize root(x)*root(y) as root(x*y). */
11180 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11181 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11182 return build_call_expr_loc (loc, rootfn, 1, arg);
11185 /* Optimize expN(x)*expN(y) as expN(x+y). */
11186 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11188 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11189 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11190 CALL_EXPR_ARG (arg0, 0),
11191 CALL_EXPR_ARG (arg1, 0));
11192 return build_call_expr_loc (loc, expfn, 1, arg);
11195 /* Optimizations of pow(...)*pow(...). */
11196 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11197 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11198 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11200 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11201 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11202 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11203 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11205 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11206 if (operand_equal_p (arg01, arg11, 0))
11208 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11209 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11210 arg00, arg10);
11211 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11214 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11215 if (operand_equal_p (arg00, arg10, 0))
11217 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11218 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11219 arg01, arg11);
11220 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
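          /* The two pow folds above, sketched at the C source level
             (hypothetical doubles x, y, z; only valid under
             flag_unsafe_math_optimizations):

               pow (x, y) * pow (z, y)   // becomes pow (x * z, y)
               pow (x, y) * pow (x, z)   // becomes pow (x, y + z)  */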
11224 /* Optimize tan(x)*cos(x) as sin(x). */
11225 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11226 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11227 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11228 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11229 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11230 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11231 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11232 CALL_EXPR_ARG (arg1, 0), 0))
11234 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11236 if (sinfn != NULL_TREE)
11237 return build_call_expr_loc (loc, sinfn, 1,
11238 CALL_EXPR_ARG (arg0, 0));
11241 /* Optimize x*pow(x,c) as pow(x,c+1). */
11242 if (fcode1 == BUILT_IN_POW
11243 || fcode1 == BUILT_IN_POWF
11244 || fcode1 == BUILT_IN_POWL)
11246 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11247 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11248 if (TREE_CODE (arg11) == REAL_CST
11249 && !TREE_OVERFLOW (arg11)
11250 && operand_equal_p (arg0, arg10, 0))
11252 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11253 REAL_VALUE_TYPE c;
11254 tree arg;
11256 c = TREE_REAL_CST (arg11);
11257 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11258 arg = build_real (type, c);
11259 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11263 /* Optimize pow(x,c)*x as pow(x,c+1). */
11264 if (fcode0 == BUILT_IN_POW
11265 || fcode0 == BUILT_IN_POWF
11266 || fcode0 == BUILT_IN_POWL)
11268 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11269 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11270 if (TREE_CODE (arg01) == REAL_CST
11271 && !TREE_OVERFLOW (arg01)
11272 && operand_equal_p (arg1, arg00, 0))
11274 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11275 REAL_VALUE_TYPE c;
11276 tree arg;
11278 c = TREE_REAL_CST (arg01);
11279 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11280 arg = build_real (type, c);
11281 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11285 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11286 if (!in_gimple_form
11287 && optimize
11288 && operand_equal_p (arg0, arg1, 0))
11290 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11292 if (powfn)
11294 tree arg = build_real (type, dconst2);
11295 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
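          /* E.g. for a double x, x * x is canonicalized here to
             pow (x, 2.0); as the comment above notes, that call is
             expanded back into x * x, so no libcall results.  */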
11300 goto associate;
11302 case BIT_IOR_EXPR:
11303 bit_ior:
11304 if (integer_all_onesp (arg1))
11305 return omit_one_operand_loc (loc, type, arg1, arg0);
11306 if (integer_zerop (arg1))
11307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11308 if (operand_equal_p (arg0, arg1, 0))
11309 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11311 /* ~X | X is -1. */
11312 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11313 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11315 t1 = build_zero_cst (type);
11316 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11317 return omit_one_operand_loc (loc, type, t1, arg1);
11320 /* X | ~X is -1. */
11321 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11322 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11324 t1 = build_zero_cst (type);
11325 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11326 return omit_one_operand_loc (loc, type, t1, arg0);
11329 /* Canonicalize (X & C1) | C2. */
11330 if (TREE_CODE (arg0) == BIT_AND_EXPR
11331 && TREE_CODE (arg1) == INTEGER_CST
11332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11334 double_int c1, c2, c3, msk;
11335 int width = TYPE_PRECISION (type), w;
11336 bool try_simplify = true;
11338 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11339 c2 = tree_to_double_int (arg1);
11341 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11342 if ((c1 & c2) == c1)
11343 return omit_one_operand_loc (loc, type, arg1,
11344 TREE_OPERAND (arg0, 0));
11346 msk = double_int::mask (width);
11348 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11349 if (msk.and_not (c1 | c2).is_zero ())
11350 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11351 TREE_OPERAND (arg0, 0), arg1);
11353 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11354 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11355 mode which allows further optimizations. */
11356 c1 &= msk;
11357 c2 &= msk;
11358 c3 = c1.and_not (c2);
11359 for (w = BITS_PER_UNIT;
11360 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11361 w <<= 1)
11363 unsigned HOST_WIDE_INT mask
11364 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11365 if (((c1.low | c2.low) & mask) == mask
11366 && (c1.low & ~mask) == 0 && c1.high == 0)
11368 c3 = double_int::from_uhwi (mask);
11369 break;
11373 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11374 with the (X * CST1) & CST2 optimization in the BIT_AND_EXPR
11375 handling and could end up in infinite recursion. */
11376 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11377 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11378 == INTEGER_CST)
11380 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11381 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11383 try_simplify = (masked != c1);
11386 if (try_simplify && c3 != c1)
11387 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11388 fold_build2_loc (loc, BIT_AND_EXPR, type,
11389 TREE_OPERAND (arg0, 0),
11390 double_int_to_tree (type,
11391 c3)),
11392 arg1);
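      /* Worked instances of the canonicalization above (hypothetical
         values): (X & 0x03) | 0x0f becomes 0x0f, since C1 & C2 == C1;
         (X & 0xf0) | 0x0f becomes X | 0x0f, since C1 | C2 is all ones;
         and (X & 0x3c) | 0x0c becomes (X & 0x30) | 0x0c, dropping the
         C1 bits already covered by C2.  */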
11395 /* (X & Y) | Y is (X, Y). */
11396 if (TREE_CODE (arg0) == BIT_AND_EXPR
11397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11398 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11399 /* (X & Y) | X is (Y, X). */
11400 if (TREE_CODE (arg0) == BIT_AND_EXPR
11401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11402 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11403 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11404 /* X | (X & Y) is (Y, X). */
11405 if (TREE_CODE (arg1) == BIT_AND_EXPR
11406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11407 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11408 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11409 /* X | (Y & X) is (Y, X). */
11410 if (TREE_CODE (arg1) == BIT_AND_EXPR
11411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11412 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11413 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11415 /* (X & ~Y) | (~X & Y) is X ^ Y */
11416 if (TREE_CODE (arg0) == BIT_AND_EXPR
11417 && TREE_CODE (arg1) == BIT_AND_EXPR)
11419 tree a0, a1, l0, l1, n0, n1;
11421 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11422 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11424 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11425 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11427 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11428 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11430 if ((operand_equal_p (n0, a0, 0)
11431 && operand_equal_p (n1, a1, 0))
11432 || (operand_equal_p (n0, a1, 0)
11433 && operand_equal_p (n1, a0, 0)))
11434 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
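      /* The identity above is the sum-of-products form of XOR; a
         hypothetical exhaustive check over single bits:

           for (int x = 0; x <= 1; x++)
             for (int y = 0; y <= 1; y++)
               assert ((((x & ~y) | (~x & y)) & 1) == (x ^ y));  */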
11437 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11438 if (t1 != NULL_TREE)
11439 return t1;
11441 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11443 This results in more efficient code for machines without a NAND
11444 instruction. Combine will canonicalize to the first form,
11445 which will allow use of NAND instructions provided by the
11446 backend if they exist. */
11447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11448 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11450 return
11451 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11452 build2 (BIT_AND_EXPR, type,
11453 fold_convert_loc (loc, type,
11454 TREE_OPERAND (arg0, 0)),
11455 fold_convert_loc (loc, type,
11456 TREE_OPERAND (arg1, 0))));
11459 /* See if this can be simplified into a rotate first. If that
11460 is unsuccessful continue in the association code. */
11461 goto bit_rotate;
11463 case BIT_XOR_EXPR:
11464 if (integer_zerop (arg1))
11465 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11466 if (integer_all_onesp (arg1))
11467 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11468 if (operand_equal_p (arg0, arg1, 0))
11469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11471 /* ~X ^ X is -1. */
11472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11475 t1 = build_zero_cst (type);
11476 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11477 return omit_one_operand_loc (loc, type, t1, arg1);
11480 /* X ^ ~X is -1. */
11481 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11484 t1 = build_zero_cst (type);
11485 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11486 return omit_one_operand_loc (loc, type, t1, arg0);
11489 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11490 with a constant, and the two constants have no bits in common,
11491 we should treat this as a BIT_IOR_EXPR since this may produce more
11492 simplifications. */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && TREE_CODE (arg1) == BIT_AND_EXPR
11495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11496 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11497 && integer_zerop (const_binop (BIT_AND_EXPR,
11498 TREE_OPERAND (arg0, 1),
11499 TREE_OPERAND (arg1, 1))))
11501 code = BIT_IOR_EXPR;
11502 goto bit_ior;
11505 /* (X | Y) ^ X -> Y & ~X. */
11506 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11509 tree t2 = TREE_OPERAND (arg0, 1);
11510 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11511 arg1);
11512 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11513 fold_convert_loc (loc, type, t2),
11514 fold_convert_loc (loc, type, t1));
11515 return t1;
11518 /* (Y | X) ^ X -> Y & ~X. */
11519 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11520 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11522 tree t2 = TREE_OPERAND (arg0, 0);
11523 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11524 arg1);
11525 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11526 fold_convert_loc (loc, type, t2),
11527 fold_convert_loc (loc, type, t1));
11528 return t1;
11531 /* X ^ (X | Y) -> Y & ~X. */
11532 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11533 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11535 tree t2 = TREE_OPERAND (arg1, 1);
11536 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11537 arg0);
11538 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11539 fold_convert_loc (loc, type, t2),
11540 fold_convert_loc (loc, type, t1));
11541 return t1;
11544 /* X ^ (Y | X) -> Y & ~X. */
11545 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11546 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11548 tree t2 = TREE_OPERAND (arg1, 0);
11549 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11550 arg0);
11551 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11552 fold_convert_loc (loc, type, t2),
11553 fold_convert_loc (loc, type, t1));
11554 return t1;
11557 /* Convert ~X ^ ~Y to X ^ Y. */
11558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11559 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11560 return fold_build2_loc (loc, code, type,
11561 fold_convert_loc (loc, type,
11562 TREE_OPERAND (arg0, 0)),
11563 fold_convert_loc (loc, type,
11564 TREE_OPERAND (arg1, 0)));
11566 /* Convert ~X ^ C to X ^ ~C. */
11567 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11568 && TREE_CODE (arg1) == INTEGER_CST)
11569 return fold_build2_loc (loc, code, type,
11570 fold_convert_loc (loc, type,
11571 TREE_OPERAND (arg0, 0)),
11572 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11574 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11575 if (TREE_CODE (arg0) == BIT_AND_EXPR
11576 && integer_onep (TREE_OPERAND (arg0, 1))
11577 && integer_onep (arg1))
11578 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11579 build_zero_cst (TREE_TYPE (arg0)));
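      /* E.g. the C expression (x & 1) ^ 1 tests for an even x; folding
         it to (x & 1) == 0 yields the same values (1 for even x, 0 for
         odd x) while exposing it to the comparison folders.  */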
11581 /* Fold (X & Y) ^ Y as ~X & Y. */
11582 if (TREE_CODE (arg0) == BIT_AND_EXPR
11583 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11585 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11586 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11587 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11588 fold_convert_loc (loc, type, arg1));
11590 /* Fold (X & Y) ^ X as ~Y & X. */
11591 if (TREE_CODE (arg0) == BIT_AND_EXPR
11592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11593 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11595 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11596 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11597 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11598 fold_convert_loc (loc, type, arg1));
11600 /* Fold X ^ (X & Y) as X & ~Y. */
11601 if (TREE_CODE (arg1) == BIT_AND_EXPR
11602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11604 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11605 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11606 fold_convert_loc (loc, type, arg0),
11607 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11609 /* Fold X ^ (Y & X) as ~Y & X. */
11610 if (TREE_CODE (arg1) == BIT_AND_EXPR
11611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11612 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11614 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11615 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11616 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11617 fold_convert_loc (loc, type, arg0));
11620 /* See if this can be simplified into a rotate first. If that
11621 is unsuccessful continue in the association code. */
11622 goto bit_rotate;
11624 case BIT_AND_EXPR:
11625 if (integer_all_onesp (arg1))
11626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11627 if (integer_zerop (arg1))
11628 return omit_one_operand_loc (loc, type, arg1, arg0);
11629 if (operand_equal_p (arg0, arg1, 0))
11630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11632 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11633 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11634 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11635 || (TREE_CODE (arg0) == EQ_EXPR
11636 && integer_zerop (TREE_OPERAND (arg0, 1))))
11637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11638 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11640 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11641 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11642 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11643 || (TREE_CODE (arg1) == EQ_EXPR
11644 && integer_zerop (TREE_OPERAND (arg1, 1))))
11645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11646 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11648 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11649 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11650 && TREE_CODE (arg1) == INTEGER_CST
11651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11653 tree tmp1 = fold_convert_loc (loc, type, arg1);
11654 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11655 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11656 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11657 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11658 return
11659 fold_convert_loc (loc, type,
11660 fold_build2_loc (loc, BIT_IOR_EXPR,
11661 type, tmp2, tmp3));
11664 /* (X | Y) & Y is (X, Y). */
11665 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11666 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11667 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11668 /* (X | Y) & X is (Y, X). */
11669 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11670 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11671 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11672 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11673 /* X & (X | Y) is (Y, X). */
11674 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11676 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11677 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11678 /* X & (Y | X) is (Y, X). */
11679 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11681 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11682 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11684 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11686 && integer_onep (TREE_OPERAND (arg0, 1))
11687 && integer_onep (arg1))
11689 tree tem2;
11690 tem = TREE_OPERAND (arg0, 0);
11691 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11692 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11693 tem, tem2);
11694 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11695 build_zero_cst (TREE_TYPE (tem)));
11697 /* Fold ~X & 1 as (X & 1) == 0. */
11698 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11699 && integer_onep (arg1))
11701 tree tem2;
11702 tem = TREE_OPERAND (arg0, 0);
11703 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11704 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11705 tem, tem2);
11706 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11707 build_zero_cst (TREE_TYPE (tem)));
11709 /* Fold !X & 1 as X == 0. */
11710 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11711 && integer_onep (arg1))
11713 tem = TREE_OPERAND (arg0, 0);
11714 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11715 build_zero_cst (TREE_TYPE (tem)));
11718 /* Fold (X ^ Y) & Y as ~X & Y. */
11719 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11720 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11722 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11723 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11724 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11725 fold_convert_loc (loc, type, arg1));
11727 /* Fold (X ^ Y) & X as ~Y & X. */
11728 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11730 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11732 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11733 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11734 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11735 fold_convert_loc (loc, type, arg1));
11737 /* Fold X & (X ^ Y) as X & ~Y. */
11738 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11739 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11741 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11742 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11743 fold_convert_loc (loc, type, arg0),
11744 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11746 /* Fold X & (Y ^ X) as ~Y & X. */
11747 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11749 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11751 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11753 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11754 fold_convert_loc (loc, type, arg0));
11757 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11758 multiple of 1 << CST. */
11759 if (TREE_CODE (arg1) == INTEGER_CST)
11761 double_int cst1 = tree_to_double_int (arg1);
11762 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11763 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11764 if ((cst1 & ncst1) == ncst1
11765 && multiple_of_p (type, arg0,
11766 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11767 return fold_convert_loc (loc, type, arg0);
11770 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11771 bits from CST2. */
11772 if (TREE_CODE (arg1) == INTEGER_CST
11773 && TREE_CODE (arg0) == MULT_EXPR
11774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11776 double_int masked
11777 = mask_with_tz (type, tree_to_double_int (arg1),
11778 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11780 if (masked.is_zero ())
11781 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11782 arg0, arg1);
11783 else if (masked != tree_to_double_int (arg1))
11784 return fold_build2_loc (loc, code, type, op0,
11785 double_int_to_tree (type, masked));
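      /* Worked instance (hypothetical values): (X * 4) & 3 folds to 0,
         since the product always has its two low bits clear, while
         (X * 4) & 7 folds to (X * 4) & 4 by dropping the known zero
         bits from the mask.  */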
11788 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11789 ((A & N) + B) & M -> (A + B) & M
11790 Similarly if (N & M) == 0,
11791 ((A | N) + B) & M -> (A + B) & M
11792 and for - instead of + (or unary - instead of +)
11793 and/or ^ instead of |.
11794 If B is constant and (B & M) == 0, fold into A & M. */
11795 if (host_integerp (arg1, 1))
11797 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11798 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11799 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11800 && (TREE_CODE (arg0) == PLUS_EXPR
11801 || TREE_CODE (arg0) == MINUS_EXPR
11802 || TREE_CODE (arg0) == NEGATE_EXPR)
11803 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11804 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11806 tree pmop[2];
11807 int which = 0;
11808 unsigned HOST_WIDE_INT cst0;
11810 /* Now we know that arg0 is (C + D) or (C - D) or
11811 -C, and arg1 (M) is (1LL << cst) - 1.
11812 Store C into PMOP[0] and D into PMOP[1]. */
11813 pmop[0] = TREE_OPERAND (arg0, 0);
11814 pmop[1] = NULL;
11815 if (TREE_CODE (arg0) != NEGATE_EXPR)
11817 pmop[1] = TREE_OPERAND (arg0, 1);
11818 which = 1;
11821 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11822 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11823 & cst1) != cst1)
11824 which = -1;
11826 for (; which >= 0; which--)
11827 switch (TREE_CODE (pmop[which]))
11829 case BIT_AND_EXPR:
11830 case BIT_IOR_EXPR:
11831 case BIT_XOR_EXPR:
11832 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11833 != INTEGER_CST)
11834 break;
11835 /* tree_low_cst not used, because we don't care about
11836 the upper bits. */
11837 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11838 cst0 &= cst1;
11839 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11841 if (cst0 != cst1)
11842 break;
11844 else if (cst0 != 0)
11845 break;
11846 /* If C or D is of the form (A & N) where
11847 (N & M) == M, or of the form (A | N) or
11848 (A ^ N) where (N & M) == 0, replace it with A. */
11849 pmop[which] = TREE_OPERAND (pmop[which], 0);
11850 break;
11851 case INTEGER_CST:
11852 /* If C or D is a N where (N & M) == 0, it can be
11853 omitted (assumed 0). */
11854 if ((TREE_CODE (arg0) == PLUS_EXPR
11855 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11856 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11857 pmop[which] = NULL;
11858 break;
11859 default:
11860 break;
11863 /* Only build anything new if we optimized one or both arguments
11864 above. */
11865 if (pmop[0] != TREE_OPERAND (arg0, 0)
11866 || (TREE_CODE (arg0) != NEGATE_EXPR
11867 && pmop[1] != TREE_OPERAND (arg0, 1)))
11869 tree utype = TREE_TYPE (arg0);
11870 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11872 /* Perform the operations in a type that has defined
11873 overflow behavior. */
11874 utype = unsigned_type_for (TREE_TYPE (arg0));
11875 if (pmop[0] != NULL)
11876 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11877 if (pmop[1] != NULL)
11878 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11881 if (TREE_CODE (arg0) == NEGATE_EXPR)
11882 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11883 else if (TREE_CODE (arg0) == PLUS_EXPR)
11885 if (pmop[0] != NULL && pmop[1] != NULL)
11886 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11887 pmop[0], pmop[1]);
11888 else if (pmop[0] != NULL)
11889 tem = pmop[0];
11890 else if (pmop[1] != NULL)
11891 tem = pmop[1];
11892 else
11893 return build_int_cst (type, 0);
11895 else if (pmop[0] == NULL)
11896 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11897 else
11898 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11899 pmop[0], pmop[1]);
11900 /* TEM is now the new binary +, - or unary - replacement. */
11901 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11902 fold_convert_loc (loc, utype, arg1));
11903 return fold_convert_loc (loc, type, tem);
11908 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11909 if (t1 != NULL_TREE)
11910 return t1;
11911 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11912 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11913 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11915 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11917 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11918 && (~TREE_INT_CST_LOW (arg1)
11919 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11920 return
11921 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11924 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11926 This results in more efficient code for machines without a NOR
11927 instruction. Combine will canonicalize to the first form,
11928 which will allow use of NOR instructions provided by the
11929 backend if they exist. */
11930 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11931 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11933 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11934 build2 (BIT_IOR_EXPR, type,
11935 fold_convert_loc (loc, type,
11936 TREE_OPERAND (arg0, 0)),
11937 fold_convert_loc (loc, type,
11938 TREE_OPERAND (arg1, 0))));
11941 /* If arg0 is derived from the address of an object or function, we may
11942 be able to fold this expression using the object or function's
11943 alignment. */
11944 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11946 unsigned HOST_WIDE_INT modulus, residue;
11947 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11949 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11950 integer_onep (arg1));
11952 /* This works because modulus is a power of 2. If this weren't the
11953 case, we'd have to replace it by its greatest power-of-2
11954 divisor: modulus & -modulus. */
11955 if (low < modulus)
11956 return build_int_cst (type, residue & low);
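      /* E.g. if arg0 is the address of an object with 16-byte
         alignment, modulus is 16 and residue is 0, so something like
         (uintptr_t) &v & 15 at the source level folds to the
         constant 0.  */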
11959 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11960 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11961 if the new mask might be further optimized. */
11962 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11963 || TREE_CODE (arg0) == RSHIFT_EXPR)
11964 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11965 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11966 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11967 < TYPE_PRECISION (TREE_TYPE (arg0))
11968 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11969 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11971 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11972 unsigned HOST_WIDE_INT mask
11973 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11974 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11975 tree shift_type = TREE_TYPE (arg0);
11977 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11978 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11979 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11980 && TYPE_PRECISION (TREE_TYPE (arg0))
11981 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11983 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11984 tree arg00 = TREE_OPERAND (arg0, 0);
11985 /* See if more bits can be proven as zero because of
11986 zero extension. */
11987 if (TREE_CODE (arg00) == NOP_EXPR
11988 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11990 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11991 if (TYPE_PRECISION (inner_type)
11992 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11993 && TYPE_PRECISION (inner_type) < prec)
11995 prec = TYPE_PRECISION (inner_type);
11996 /* See if we can shorten the right shift. */
11997 if (shiftc < prec)
11998 shift_type = inner_type;
12001 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12002 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12003 zerobits <<= prec - shiftc;
12004 /* For an arithmetic shift, if the sign bit could be set, zerobits
12005 can actually contain sign bits, so no transformation is
12006 possible, unless MASK masks them all away. In that
12007 case the shift needs to be converted into a logical shift. */
12008 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12009 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12011 if ((mask & zerobits) == 0)
12012 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12013 else
12014 zerobits = 0;
12018 /* ((X << 16) & 0xff00) is (X, 0). */
12019 if ((mask & zerobits) == mask)
12020 return omit_one_operand_loc (loc, type,
12021 build_int_cst (type, 0), arg0);
12023 newmask = mask | zerobits;
12024 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12026 /* Only do the transformation if NEWMASK is some integer
12027 mode's mask. */
12028 for (prec = BITS_PER_UNIT;
12029 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12030 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12031 break;
12032 if (prec < HOST_BITS_PER_WIDE_INT
12033 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12035 tree newmaskt;
12037 if (shift_type != TREE_TYPE (arg0))
12039 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12040 fold_convert_loc (loc, shift_type,
12041 TREE_OPERAND (arg0, 0)),
12042 TREE_OPERAND (arg0, 1));
12043 tem = fold_convert_loc (loc, type, tem);
12045 else
12046 tem = op0;
12047 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12048 if (!tree_int_cst_equal (newmaskt, arg1))
12049 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12054 goto associate;
12056 case RDIV_EXPR:
12057 /* Don't touch a floating-point divide by zero unless the mode
12058 of the constant can represent infinity. */
12059 if (TREE_CODE (arg1) == REAL_CST
12060 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12061 && real_zerop (arg1))
12062 return NULL_TREE;
12064 /* Optimize A / A to 1.0 if we don't care about
12065 NaNs or Infinities. Skip the transformation
12066 for non-real operands. */
12067 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12068 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12069 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12070 && operand_equal_p (arg0, arg1, 0))
12072 tree r = build_real (TREE_TYPE (arg0), dconst1);
12074 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12077 /* The complex version of the above A / A optimization. */
12078 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12079 && operand_equal_p (arg0, arg1, 0))
12081 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12082 if (! HONOR_NANS (TYPE_MODE (elem_type))
12083 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12085 tree r = build_real (elem_type, dconst1);
12086 /* omit_two_operands will call fold_convert for us. */
12087 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12091 /* (-A) / (-B) -> A / B */
12092 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12093 return fold_build2_loc (loc, RDIV_EXPR, type,
12094 TREE_OPERAND (arg0, 0),
12095 negate_expr (arg1));
12096 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12097 return fold_build2_loc (loc, RDIV_EXPR, type,
12098 negate_expr (arg0),
12099 TREE_OPERAND (arg1, 0));
12101 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
12102 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12103 && real_onep (arg1))
12104 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12106 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
12107 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12108 && real_minus_onep (arg1))
12109 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12110 negate_expr (arg0)));
12112 /* If ARG1 is a constant, we can convert this to a multiply by the
12113 reciprocal. This does not have the same rounding properties,
12114 so only do this if -freciprocal-math. We can actually
12115 always safely do it if ARG1 is a power of two, but it's hard to
12116 tell if it is or not in a portable manner. */
12117 if (optimize
12118 && (TREE_CODE (arg1) == REAL_CST
12119 || (TREE_CODE (arg1) == COMPLEX_CST
12120 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12121 || (TREE_CODE (arg1) == VECTOR_CST
12122 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12124 if (flag_reciprocal_math
12125 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12126 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12127 /* Find the reciprocal if optimizing and the result is exact.
12128 TODO: Complex reciprocal not implemented. */
12129 if (TREE_CODE (arg1) != COMPLEX_CST)
12131 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12133 if (inverse)
12134 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
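      /* E.g. with -freciprocal-math, x / 5.0 becomes x * 0.2; x / 4.0
         becomes x * 0.25 even without the flag, because 0.25 is an
         exact binary reciprocal and exact_inverse succeeds.  */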
12137 /* Convert A/B/C to A/(B*C). */
12138 if (flag_reciprocal_math
12139 && TREE_CODE (arg0) == RDIV_EXPR)
12140 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12141 fold_build2_loc (loc, MULT_EXPR, type,
12142 TREE_OPERAND (arg0, 1), arg1));
12144 /* Convert A/(B/C) to (A/B)*C. */
12145 if (flag_reciprocal_math
12146 && TREE_CODE (arg1) == RDIV_EXPR)
12147 return fold_build2_loc (loc, MULT_EXPR, type,
12148 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12149 TREE_OPERAND (arg1, 0)),
12150 TREE_OPERAND (arg1, 1));
12152 /* Convert C1/(X*C2) into (C1/C2)/X. */
12153 if (flag_reciprocal_math
12154 && TREE_CODE (arg1) == MULT_EXPR
12155 && TREE_CODE (arg0) == REAL_CST
12156 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12158 tree tem = const_binop (RDIV_EXPR, arg0,
12159 TREE_OPERAND (arg1, 1));
12160 if (tem)
12161 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12162 TREE_OPERAND (arg1, 0));
12165 if (flag_unsafe_math_optimizations)
12167 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12168 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12170 /* Optimize sin(x)/cos(x) as tan(x). */
12171 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12172 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12173 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12174 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12175 CALL_EXPR_ARG (arg1, 0), 0))
12177 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12179 if (tanfn != NULL_TREE)
12180 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12183 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12184 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12185 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12186 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12187 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12188 CALL_EXPR_ARG (arg1, 0), 0))
12190 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12192 if (tanfn != NULL_TREE)
12194 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12195 CALL_EXPR_ARG (arg0, 0));
12196 return fold_build2_loc (loc, RDIV_EXPR, type,
12197 build_real (type, dconst1), tmp);
12201 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12202 NaNs or Infinities. */
12203 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12204 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12205 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12207 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12208 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12210 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12211 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12212 && operand_equal_p (arg00, arg01, 0))
12214 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12216 if (cosfn != NULL_TREE)
12217 return build_call_expr_loc (loc, cosfn, 1, arg00);
12221 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12222 NaNs or Infinities. */
12223 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12224 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12225 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12227 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12228 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12230 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12231 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12232 && operand_equal_p (arg00, arg01, 0))
12234 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12236 if (cosfn != NULL_TREE)
12238 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12239 return fold_build2_loc (loc, RDIV_EXPR, type,
12240 build_real (type, dconst1),
12241 tmp);
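/* Summary illustration (editor's note, not from the original source):
   under -funsafe-math-optimizations the quotients fold as
   sin(x)/cos(x) -> tan(x), cos(x)/sin(x) -> 1.0/tan(x),
   sin(x)/tan(x) -> cos(x), tan(x)/sin(x) -> 1.0/cos(x); the latter
   two only when NaNs and infinities need not be honored.  */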
12246 /* Optimize pow(x,c)/x as pow(x,c-1). */
12247 if (fcode0 == BUILT_IN_POW
12248 || fcode0 == BUILT_IN_POWF
12249 || fcode0 == BUILT_IN_POWL)
12251 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12252 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12253 if (TREE_CODE (arg01) == REAL_CST
12254 && !TREE_OVERFLOW (arg01)
12255 && operand_equal_p (arg1, arg00, 0))
12257 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12258 REAL_VALUE_TYPE c;
12259 tree arg;
12261 c = TREE_REAL_CST (arg01);
12262 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12263 arg = build_real (type, c);
12264 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
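/* Worked example (editor's note, not from the original source):
   "pow (x, 3.0) / x" becomes "pow (x, 2.0)"; the constant exponent
   is simply decremented by 1.  */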
12268 /* Optimize a/root(b/c) into a*root(c/b). */
12269 if (BUILTIN_ROOT_P (fcode1))
12271 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12273 if (TREE_CODE (rootarg) == RDIV_EXPR)
12275 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12276 tree b = TREE_OPERAND (rootarg, 0);
12277 tree c = TREE_OPERAND (rootarg, 1);
12279 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12281 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12282 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12286 /* Optimize x/expN(y) into x*expN(-y). */
12287 if (BUILTIN_EXPONENT_P (fcode1))
12289 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12290 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12291 arg1 = build_call_expr_loc (loc,
12292 expfn, 1,
12293 fold_convert_loc (loc, type, arg));
12294 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12297 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12298 if (fcode1 == BUILT_IN_POW
12299 || fcode1 == BUILT_IN_POWF
12300 || fcode1 == BUILT_IN_POWL)
12302 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12303 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12304 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12305 tree neg11 = fold_convert_loc (loc, type,
12306 negate_expr (arg11));
12307 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12308 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
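/* Illustration (editor's note, not from the original source):
   "x / exp (y)" becomes "x * exp (-y)" and "x / pow (y, z)" becomes
   "x * pow (y, -z)", replacing the division by a multiplication.  */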
12311 return NULL_TREE;
12313 case TRUNC_DIV_EXPR:
12314 /* Optimize (X & (-A)) / A where A is a power of 2,
12315 to X >> log2(A) */
12316 if (TREE_CODE (arg0) == BIT_AND_EXPR
12317 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12318 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12320 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12321 arg1, TREE_OPERAND (arg0, 1));
12322 if (sum && integer_zerop (sum)) {
12323 unsigned long pow2;
12325 if (TREE_INT_CST_LOW (arg1))
12326 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12327 else
12328 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12329 + HOST_BITS_PER_WIDE_INT;
12331 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12332 TREE_OPERAND (arg0, 0),
12333 build_int_cst (integer_type_node, pow2));
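/* Worked example (editor's note, not from the original source): for
   signed X, "(X & -8) / 8" passes the zero-sum check (8 + -8 == 0)
   and is rewritten as "X >> 3", since log2 (8) == 3.  */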
12337 /* Fall through */
12339 case FLOOR_DIV_EXPR:
12340 /* Simplify A / (B << N) where A and B are positive and B is
12341 a power of 2, to A >> (N + log2(B)). */
12342 strict_overflow_p = false;
12343 if (TREE_CODE (arg1) == LSHIFT_EXPR
12344 && (TYPE_UNSIGNED (type)
12345 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12347 tree sval = TREE_OPERAND (arg1, 0);
12348 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12350 tree sh_cnt = TREE_OPERAND (arg1, 1);
12351 unsigned long pow2;
12353 if (TREE_INT_CST_LOW (sval))
12354 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12355 else
12356 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12357 + HOST_BITS_PER_WIDE_INT;
12359 if (strict_overflow_p)
12360 fold_overflow_warning (("assuming signed overflow does not "
12361 "occur when simplifying A / (B << N)"),
12362 WARN_STRICT_OVERFLOW_MISC);
12364 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12365 sh_cnt,
12366 build_int_cst (TREE_TYPE (sh_cnt),
12367 pow2));
12368 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12369 fold_convert_loc (loc, type, arg0), sh_cnt);
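/* Worked example (editor's note, not from the original source): for
   unsigned (or provably nonnegative) A, "A / (4 << N)" becomes
   "A >> (N + 2)", since log2 (4) == 2.  */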
12373 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12374 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12375 if (INTEGRAL_TYPE_P (type)
12376 && TYPE_UNSIGNED (type)
12377 && code == FLOOR_DIV_EXPR)
12378 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12380 /* Fall through */
12382 case ROUND_DIV_EXPR:
12383 case CEIL_DIV_EXPR:
12384 case EXACT_DIV_EXPR:
12385 if (integer_onep (arg1))
12386 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12387 if (integer_zerop (arg1))
12388 return NULL_TREE;
12389 /* X / -1 is -X. */
12390 if (!TYPE_UNSIGNED (type)
12391 && TREE_CODE (arg1) == INTEGER_CST
12392 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12393 && TREE_INT_CST_HIGH (arg1) == -1)
12394 return fold_convert_loc (loc, type, negate_expr (arg0));
12396 /* Convert -A / -B to A / B when the type is signed and overflow is
12397 undefined. */
12398 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12399 && TREE_CODE (arg0) == NEGATE_EXPR
12400 && negate_expr_p (arg1))
12402 if (INTEGRAL_TYPE_P (type))
12403 fold_overflow_warning (("assuming signed overflow does not occur "
12404 "when distributing negation across "
12405 "division"),
12406 WARN_STRICT_OVERFLOW_MISC);
12407 return fold_build2_loc (loc, code, type,
12408 fold_convert_loc (loc, type,
12409 TREE_OPERAND (arg0, 0)),
12410 fold_convert_loc (loc, type,
12411 negate_expr (arg1)));
12413 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12414 && TREE_CODE (arg1) == NEGATE_EXPR
12415 && negate_expr_p (arg0))
12417 if (INTEGRAL_TYPE_P (type))
12418 fold_overflow_warning (("assuming signed overflow does not occur "
12419 "when distributing negation across "
12420 "division"),
12421 WARN_STRICT_OVERFLOW_MISC);
12422 return fold_build2_loc (loc, code, type,
12423 fold_convert_loc (loc, type,
12424 negate_expr (arg0)),
12425 fold_convert_loc (loc, type,
12426 TREE_OPERAND (arg1, 0)));
12429 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12430 operation, EXACT_DIV_EXPR.
12432 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12433 At one time others generated faster code; it's not clear if they do
12434 after the last round of changes to the DIV code in expmed.c. */
12435 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12436 && multiple_of_p (type, arg0, arg1))
12437 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12439 strict_overflow_p = false;
12440 if (TREE_CODE (arg1) == INTEGER_CST
12441 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12442 &strict_overflow_p)))
12444 if (strict_overflow_p)
12445 fold_overflow_warning (("assuming signed overflow does not occur "
12446 "when simplifying division"),
12447 WARN_STRICT_OVERFLOW_MISC);
12448 return fold_convert_loc (loc, type, tem);
12451 return NULL_TREE;
12453 case CEIL_MOD_EXPR:
12454 case FLOOR_MOD_EXPR:
12455 case ROUND_MOD_EXPR:
12456 case TRUNC_MOD_EXPR:
12457 /* X % 1 is always zero, but be sure to preserve any side
12458 effects in X. */
12459 if (integer_onep (arg1))
12460 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12462 /* For X % 0, return X % 0 unchanged so that we can get the
12463 proper warnings and errors. */
12464 if (integer_zerop (arg1))
12465 return NULL_TREE;
12467 /* 0 % X is always zero, but be sure to preserve any side
12468 effects in X. Place this after checking for X == 0. */
12469 if (integer_zerop (arg0))
12470 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12472 /* X % -1 is zero. */
12473 if (!TYPE_UNSIGNED (type)
12474 && TREE_CODE (arg1) == INTEGER_CST
12475 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12476 && TREE_INT_CST_HIGH (arg1) == -1)
12477 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12479 /* X % -C is the same as X % C. */
12480 if (code == TRUNC_MOD_EXPR
12481 && !TYPE_UNSIGNED (type)
12482 && TREE_CODE (arg1) == INTEGER_CST
12483 && !TREE_OVERFLOW (arg1)
12484 && TREE_INT_CST_HIGH (arg1) < 0
12485 && !TYPE_OVERFLOW_TRAPS (type)
12486 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12487 && !sign_bit_p (arg1, arg1))
12488 return fold_build2_loc (loc, code, type,
12489 fold_convert_loc (loc, type, arg0),
12490 fold_convert_loc (loc, type,
12491 negate_expr (arg1)));
12493 /* X % -Y is the same as X % Y. */
12494 if (code == TRUNC_MOD_EXPR
12495 && !TYPE_UNSIGNED (type)
12496 && TREE_CODE (arg1) == NEGATE_EXPR
12497 && !TYPE_OVERFLOW_TRAPS (type))
12498 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12499 fold_convert_loc (loc, type,
12500 TREE_OPERAND (arg1, 0)));
12502 strict_overflow_p = false;
12503 if (TREE_CODE (arg1) == INTEGER_CST
12504 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12505 &strict_overflow_p)))
12507 if (strict_overflow_p)
12508 fold_overflow_warning (("assuming signed overflow does not occur "
12509 "when simplifying modulus"),
12510 WARN_STRICT_OVERFLOW_MISC);
12511 return fold_convert_loc (loc, type, tem);
12514 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12515 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12516 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12517 && (TYPE_UNSIGNED (type)
12518 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12520 tree c = arg1;
12521 /* Also optimize A % (C << N) where C is a power of 2,
12522 to A & ((C << N) - 1). */
12523 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12524 c = TREE_OPERAND (arg1, 0);
12526 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12528 tree mask
12529 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12530 build_int_cst (TREE_TYPE (arg1), 1));
12531 if (strict_overflow_p)
12532 fold_overflow_warning (("assuming signed overflow does not "
12533 "occur when simplifying "
12534 "X % (power of two)"),
12535 WARN_STRICT_OVERFLOW_MISC);
12536 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12537 fold_convert_loc (loc, type, arg0),
12538 fold_convert_loc (loc, type, mask));
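/* Worked example (editor's note, not from the original source): for
   unsigned X, "X % 16" becomes "X & 15", and "X % (2 << N)" becomes
   "X & ((2 << N) - 1)".  */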
12542 return NULL_TREE;
12544 case LROTATE_EXPR:
12545 case RROTATE_EXPR:
12546 if (integer_all_onesp (arg0))
12547 return omit_one_operand_loc (loc, type, arg0, arg1);
12548 goto shift;
12550 case RSHIFT_EXPR:
12551 /* Optimize -1 >> x for arithmetic right shifts. */
12552 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12553 && tree_expr_nonnegative_p (arg1))
12554 return omit_one_operand_loc (loc, type, arg0, arg1);
12555 /* ... fall through ... */
12557 case LSHIFT_EXPR:
12558 shift:
12559 if (integer_zerop (arg1))
12560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12561 if (integer_zerop (arg0))
12562 return omit_one_operand_loc (loc, type, arg0, arg1);
12564 /* Prefer vector1 << scalar to vector1 << vector2
12565 if vector2 is uniform. */
12566 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12567 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12568 return fold_build2_loc (loc, code, type, op0, tem);
12570 /* Since a negative shift count is not well-defined,
12571 don't try to compute it in the compiler. */
12572 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12573 return NULL_TREE;
12575 prec = element_precision (type);
12577 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12578 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12579 && TREE_INT_CST_LOW (arg1) < prec
12580 && host_integerp (TREE_OPERAND (arg0, 1), true)
12581 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12583 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12584 + TREE_INT_CST_LOW (arg1));
12586 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12587 being well defined. */
12588 if (low >= prec)
12590 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12591 low = low % prec;
12592 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12593 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12594 TREE_OPERAND (arg0, 0));
12595 else
12596 low = prec - 1;
12599 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12600 build_int_cst (TREE_TYPE (arg1), low));
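/* Worked example (editor's note, not from the original source):
   "(x << 3) << 5" becomes "x << 8" while 8 is below the precision;
   when the combined count reaches the precision, a left shift (or
   any shift of an unsigned value) folds to zero, a rotate count is
   reduced modulo the precision, and a signed right shift is clamped
   to precision - 1.  */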
12603 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12604 into x & ((unsigned)-1 >> c) for unsigned types. */
12605 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12606 || (TYPE_UNSIGNED (type)
12607 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12608 && host_integerp (arg1, false)
12609 && TREE_INT_CST_LOW (arg1) < prec
12610 && host_integerp (TREE_OPERAND (arg0, 1), false)
12611 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12613 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12614 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12615 tree lshift;
12616 tree arg00;
12618 if (low0 == low1)
12620 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12622 lshift = build_minus_one_cst (type);
12623 lshift = const_binop (code, lshift, arg1);
12625 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12629 /* Rewrite an LROTATE_EXPR by a constant into an
12630 RROTATE_EXPR by a new constant. */
12631 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12633 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12634 tem = const_binop (MINUS_EXPR, tem, arg1);
12635 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
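/* Worked example (editor's note, not from the original source): on a
   32-bit type, a left rotate by 5 is rewritten as a right rotate by
   27 (prec - 5), so later code only has to handle one rotate
   direction.  */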
12638 /* If we have a rotate of a bit operation with the rotate count and
12639 the second operand of the bit operation both constant,
12640 permute the two operations. */
12641 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12642 && (TREE_CODE (arg0) == BIT_AND_EXPR
12643 || TREE_CODE (arg0) == BIT_IOR_EXPR
12644 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12646 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12647 fold_build2_loc (loc, code, type,
12648 TREE_OPERAND (arg0, 0), arg1),
12649 fold_build2_loc (loc, code, type,
12650 TREE_OPERAND (arg0, 1), arg1));
12652 /* Two consecutive rotates adding up to the precision of the
12653 type can be ignored. */
12654 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12655 && TREE_CODE (arg0) == RROTATE_EXPR
12656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12657 && TREE_INT_CST_HIGH (arg1) == 0
12658 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12659 && ((TREE_INT_CST_LOW (arg1)
12660 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12661 == prec))
12662 return TREE_OPERAND (arg0, 0);
12664 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12665 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12666 if the latter can be further optimized. */
12667 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12668 && TREE_CODE (arg0) == BIT_AND_EXPR
12669 && TREE_CODE (arg1) == INTEGER_CST
12670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12672 tree mask = fold_build2_loc (loc, code, type,
12673 fold_convert_loc (loc, type,
12674 TREE_OPERAND (arg0, 1)),
12675 arg1);
12676 tree shift = fold_build2_loc (loc, code, type,
12677 fold_convert_loc (loc, type,
12678 TREE_OPERAND (arg0, 0)),
12679 arg1);
12680 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12681 if (tem)
12682 return tem;
12685 return NULL_TREE;
12687 case MIN_EXPR:
12688 if (operand_equal_p (arg0, arg1, 0))
12689 return omit_one_operand_loc (loc, type, arg0, arg1);
12690 if (INTEGRAL_TYPE_P (type)
12691 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12692 return omit_one_operand_loc (loc, type, arg1, arg0);
12693 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12694 if (tem)
12695 return tem;
12696 goto associate;
12698 case MAX_EXPR:
12699 if (operand_equal_p (arg0, arg1, 0))
12700 return omit_one_operand_loc (loc, type, arg0, arg1);
12701 if (INTEGRAL_TYPE_P (type)
12702 && TYPE_MAX_VALUE (type)
12703 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12704 return omit_one_operand_loc (loc, type, arg1, arg0);
12705 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12706 if (tem)
12707 return tem;
12708 goto associate;
12710 case TRUTH_ANDIF_EXPR:
12711 /* Note that the operands of this must be ints
12712 and their values must be 0 or 1.
12713 ("true" is a fixed value perhaps depending on the language.) */
12714 /* If first arg is constant zero, return it. */
12715 if (integer_zerop (arg0))
12716 return fold_convert_loc (loc, type, arg0);
12717 case TRUTH_AND_EXPR:
12718 /* If either arg is constant true, drop it. */
12719 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12720 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12721 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12722 /* Preserve sequence points. */
12723 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12724 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12725 /* If second arg is constant zero, result is zero, but first arg
12726 must be evaluated. */
12727 if (integer_zerop (arg1))
12728 return omit_one_operand_loc (loc, type, arg1, arg0);
12729 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12730 case will be handled here. */
12731 if (integer_zerop (arg0))
12732 return omit_one_operand_loc (loc, type, arg0, arg1);
12734 /* !X && X is always false. */
12735 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12736 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12737 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12738 /* X && !X is always false. */
12739 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12740 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12741 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12743 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12744 means A >= Y && A != MAX, but in this case we know that
12745 A < X <= MAX. */
12747 if (!TREE_SIDE_EFFECTS (arg0)
12748 && !TREE_SIDE_EFFECTS (arg1))
12750 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12751 if (tem && !operand_equal_p (tem, arg0, 0))
12752 return fold_build2_loc (loc, code, type, tem, arg1);
12754 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12755 if (tem && !operand_equal_p (tem, arg1, 0))
12756 return fold_build2_loc (loc, code, type, arg0, tem);
12759 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12760 != NULL_TREE)
12761 return tem;
12763 return NULL_TREE;
12765 case TRUTH_ORIF_EXPR:
12766 /* Note that the operands of this must be ints
12767 and their values must be 0 or true.
12768 ("true" is a fixed value perhaps depending on the language.) */
12769 /* If first arg is constant true, return it. */
12770 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12771 return fold_convert_loc (loc, type, arg0);
12772 case TRUTH_OR_EXPR:
12773 /* If either arg is constant zero, drop it. */
12774 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12775 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12776 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12777 /* Preserve sequence points. */
12778 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12779 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12780 /* If second arg is constant true, result is true, but we must
12781 evaluate first arg. */
12782 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12783 return omit_one_operand_loc (loc, type, arg1, arg0);
12784 /* Likewise for first arg, but note this only occurs here for
12785 TRUTH_OR_EXPR. */
12786 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12787 return omit_one_operand_loc (loc, type, arg0, arg1);
12789 /* !X || X is always true. */
12790 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12791 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12792 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12793 /* X || !X is always true. */
12794 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12796 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12798 /* (X && !Y) || (!X && Y) is X ^ Y */
12799 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12800 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12802 tree a0, a1, l0, l1, n0, n1;
12804 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12805 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12807 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12808 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12810 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12811 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12813 if ((operand_equal_p (n0, a0, 0)
12814 && operand_equal_p (n1, a1, 0))
12815 || (operand_equal_p (n0, a1, 0)
12816 && operand_equal_p (n1, a0, 0)))
12817 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
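/* Illustration (editor's note, not from the original source): both
   "(x && !y) || (!x && y)" and the commuted "(x && !y) || (y && !x)"
   match the operand tests above and fold to "x ^ y".  */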
12820 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12821 != NULL_TREE)
12822 return tem;
12824 return NULL_TREE;
12826 case TRUTH_XOR_EXPR:
12827 /* If the second arg is constant zero, drop it. */
12828 if (integer_zerop (arg1))
12829 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12830 /* If the second arg is constant true, this is a logical inversion. */
12831 if (integer_onep (arg1))
12833 tem = invert_truthvalue_loc (loc, arg0);
12834 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12836 /* Identical arguments cancel to zero. */
12837 if (operand_equal_p (arg0, arg1, 0))
12838 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12840 /* !X ^ X is always true. */
12841 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12843 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12845 /* X ^ !X is always true. */
12846 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12847 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12848 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12850 return NULL_TREE;
12852 case EQ_EXPR:
12853 case NE_EXPR:
12854 STRIP_NOPS (arg0);
12855 STRIP_NOPS (arg1);
12857 tem = fold_comparison (loc, code, type, op0, op1);
12858 if (tem != NULL_TREE)
12859 return tem;
12861 /* bool_var != 0 becomes bool_var. */
12862 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12863 && code == NE_EXPR)
12864 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12866 /* bool_var == 1 becomes bool_var. */
12867 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12868 && code == EQ_EXPR)
12869 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12871 /* bool_var != 1 becomes !bool_var. */
12872 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12873 && code == NE_EXPR)
12874 return fold_convert_loc (loc, type,
12875 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12876 TREE_TYPE (arg0), arg0));
12878 /* bool_var == 0 becomes !bool_var. */
12879 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12880 && code == EQ_EXPR)
12881 return fold_convert_loc (loc, type,
12882 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12883 TREE_TYPE (arg0), arg0));
12885 /* !exp != 0 becomes !exp */
12886 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12887 && code == NE_EXPR)
12888 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12890 /* If this is an equality comparison of the address of two non-weak,
12891 unaliased symbols, neither of which is extern (since we do not
12892 have access to attributes for externs), then we know the result. */
12893 if (TREE_CODE (arg0) == ADDR_EXPR
12894 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12895 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12896 && ! lookup_attribute ("alias",
12897 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12898 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12899 && TREE_CODE (arg1) == ADDR_EXPR
12900 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12901 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12902 && ! lookup_attribute ("alias",
12903 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12904 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12906 /* We know that we're looking at the address of two
12907 non-weak, unaliased, static _DECL nodes.
12909 It is both wasteful and incorrect to call operand_equal_p
12910 to compare the two ADDR_EXPR nodes. It is wasteful in that
12911 all we need to do is test pointer equality for the arguments
12912 to the two ADDR_EXPR nodes. It is incorrect to use
12913 operand_equal_p as that function is NOT equivalent to a
12914 C equality test. It can in fact return false for two
12915 objects which would test as equal using the C equality
12916 operator. */
12917 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12918 return constant_boolean_node (equal
12919 ? code == EQ_EXPR : code != EQ_EXPR,
12920 type);
12923 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12924 a MINUS_EXPR of a constant, we can convert it into a comparison with
12925 a revised constant as long as no overflow occurs. */
12926 if (TREE_CODE (arg1) == INTEGER_CST
12927 && (TREE_CODE (arg0) == PLUS_EXPR
12928 || TREE_CODE (arg0) == MINUS_EXPR)
12929 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12930 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12931 ? MINUS_EXPR : PLUS_EXPR,
12932 fold_convert_loc (loc, TREE_TYPE (arg0),
12933 arg1),
12934 TREE_OPERAND (arg0, 1)))
12935 && !TREE_OVERFLOW (tem))
12936 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12938 /* Similarly for a NEGATE_EXPR. */
12939 if (TREE_CODE (arg0) == NEGATE_EXPR
12940 && TREE_CODE (arg1) == INTEGER_CST
12941 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12942 arg1)))
12943 && TREE_CODE (tem) == INTEGER_CST
12944 && !TREE_OVERFLOW (tem))
12945 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12947 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12948 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12949 && TREE_CODE (arg1) == INTEGER_CST
12950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12951 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12952 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12953 fold_convert_loc (loc,
12954 TREE_TYPE (arg0),
12955 arg1),
12956 TREE_OPERAND (arg0, 1)));
12958 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12959 if ((TREE_CODE (arg0) == PLUS_EXPR
12960 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12961 || TREE_CODE (arg0) == MINUS_EXPR)
12962 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12963 0)),
12964 arg1, 0)
12965 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12966 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12968 tree val = TREE_OPERAND (arg0, 1);
12969 return omit_two_operands_loc (loc, type,
12970 fold_build2_loc (loc, code, type,
12971 val,
12972 build_int_cst (TREE_TYPE (val),
12973 0)),
12974 TREE_OPERAND (arg0, 0), arg1);
12977 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12978 if (TREE_CODE (arg0) == MINUS_EXPR
12979 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12980 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12981 1)),
12982 arg1, 0)
12983 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12985 return omit_two_operands_loc (loc, type,
12986 code == NE_EXPR
12987 ? boolean_true_node : boolean_false_node,
12988 TREE_OPERAND (arg0, 1), arg1);
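/* Reasoning note (editor's addition): when C is odd, "C - X == X"
   would require 2*X == C, which no integer X can satisfy (2*X stays
   even, also modulo a power of two), so the comparison is a constant
   false for == and true for !=.  */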
12991 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12992 for !=. Don't do this for ordered comparisons due to overflow. */
12993 if (TREE_CODE (arg0) == MINUS_EXPR
12994 && integer_zerop (arg1))
12995 return fold_build2_loc (loc, code, type,
12996 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12998 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12999 if (TREE_CODE (arg0) == ABS_EXPR
13000 && (integer_zerop (arg1) || real_zerop (arg1)))
13001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13003 /* If this is an EQ or NE comparison with zero and ARG0 is
13004 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13005 two operations, but the latter can be done in one less insn
13006 on machines that have only two-operand insns or on which a
13007 constant cannot be the first operand. */
13008 if (TREE_CODE (arg0) == BIT_AND_EXPR
13009 && integer_zerop (arg1))
13011 tree arg00 = TREE_OPERAND (arg0, 0);
13012 tree arg01 = TREE_OPERAND (arg0, 1);
13013 if (TREE_CODE (arg00) == LSHIFT_EXPR
13014 && integer_onep (TREE_OPERAND (arg00, 0)))
13016 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13017 arg01, TREE_OPERAND (arg00, 1));
13018 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13019 build_int_cst (TREE_TYPE (arg0), 1));
13020 return fold_build2_loc (loc, code, type,
13021 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13022 arg1);
13024 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13025 && integer_onep (TREE_OPERAND (arg01, 0)))
13027 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13028 arg00, TREE_OPERAND (arg01, 1));
13029 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13030 build_int_cst (TREE_TYPE (arg0), 1));
13031 return fold_build2_loc (loc, code, type,
13032 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13033 arg1);
13037 /* If this is an NE or EQ comparison of zero against the result of a
13038 signed MOD operation whose second operand is a power of 2, make
13039 the MOD operation unsigned since it is simpler and equivalent. */
13040 if (integer_zerop (arg1)
13041 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13042 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13043 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13044 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13045 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13046 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13048 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13049 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13050 fold_convert_loc (loc, newtype,
13051 TREE_OPERAND (arg0, 0)),
13052 fold_convert_loc (loc, newtype,
13053 TREE_OPERAND (arg0, 1)));
13055 return fold_build2_loc (loc, code, type, newmod,
13056 fold_convert_loc (loc, newtype, arg1));
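/* Illustration (editor's note, not from the original source): for
   signed X, "X % 4 == 0" becomes "(unsigned) X % 4U == 0"; the two
   forms agree whenever the result is compared against zero, and the
   unsigned modulus then lends itself to a simple bit test.  */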
13059 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13060 C1 is a valid shift constant, and C2 is a power of two, i.e.
13061 a single bit. */
13062 if (TREE_CODE (arg0) == BIT_AND_EXPR
13063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13065 == INTEGER_CST
13066 && integer_pow2p (TREE_OPERAND (arg0, 1))
13067 && integer_zerop (arg1))
13069 tree itype = TREE_TYPE (arg0);
13070 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13071 prec = TYPE_PRECISION (itype);
13073 /* Check for a valid shift count. */
13074 if (TREE_INT_CST_HIGH (arg001) == 0
13075 && TREE_INT_CST_LOW (arg001) < prec)
13077 tree arg01 = TREE_OPERAND (arg0, 1);
13078 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13079 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13080 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13081 can be rewritten as (X & (C2 << C1)) != 0. */
13082 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13084 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13085 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13086 return fold_build2_loc (loc, code, type, tem,
13087 fold_convert_loc (loc, itype, arg1));
13089 /* Otherwise, for signed (arithmetic) shifts,
13090 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13091 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13092 else if (!TYPE_UNSIGNED (itype))
13093 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13094 arg000, build_int_cst (itype, 0));
13095 /* Otherwise, for unsigned (logical) shifts,
13096 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13097 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13098 else
13099 return omit_one_operand_loc (loc, type,
13100 code == EQ_EXPR ? integer_one_node
13101 : integer_zero_node,
13102 arg000);
13106 /* If we have (A & C) == C where C is a power of 2, convert this into
13107 (A & C) != 0. Similarly for NE_EXPR. */
13108 if (TREE_CODE (arg0) == BIT_AND_EXPR
13109 && integer_pow2p (TREE_OPERAND (arg0, 1))
13110 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13111 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13112 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13113 integer_zero_node));
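/* Worked example (editor's note, not from the original source): with
   the single-bit constant 4, "(a & 4) == 4" tests the same bit as
   "(a & 4) != 0", so the comparison is rewritten as the test against
   zero.  */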
13115 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13116 bit, then fold the expression into A < 0 or A >= 0. */
13117 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13118 if (tem)
13119 return tem;
13121 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13122 Similarly for NE_EXPR. */
13123 if (TREE_CODE (arg0) == BIT_AND_EXPR
13124 && TREE_CODE (arg1) == INTEGER_CST
13125 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13127 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13128 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13129 TREE_OPERAND (arg0, 1));
13130 tree dandnotc
13131 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13132 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13133 notc);
13134 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13135 if (integer_nonzerop (dandnotc))
13136 return omit_one_operand_loc (loc, type, rslt, arg0);
13139 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13140 Similarly for NE_EXPR. */
13141 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13142 && TREE_CODE (arg1) == INTEGER_CST
13143 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13145 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13146 tree candnotd
13147 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13148 TREE_OPERAND (arg0, 1),
13149 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13150 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13151 if (integer_nonzerop (candnotd))
13152 return omit_one_operand_loc (loc, type, rslt, arg0);
13155 /* If this is a comparison of a field, we may be able to simplify it. */
13156 if ((TREE_CODE (arg0) == COMPONENT_REF
13157 || TREE_CODE (arg0) == BIT_FIELD_REF)
13158 /* Handle the constant case even without -O
13159 to make sure the warnings are given. */
13160 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13162 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13163 if (t1)
13164 return t1;
13167 /* Optimize comparisons of strlen vs zero to a compare of the
13168 first character of the string vs zero. To wit,
13169 strlen(ptr) == 0 => *ptr == 0
13170 strlen(ptr) != 0 => *ptr != 0
13171 Other cases should reduce to one of these two (or a constant)
13172 due to the return value of strlen being unsigned. */
13173 if (TREE_CODE (arg0) == CALL_EXPR
13174 && integer_zerop (arg1))
13176 tree fndecl = get_callee_fndecl (arg0);
13178 if (fndecl
13179 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13180 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13181 && call_expr_nargs (arg0) == 1
13182 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13184 tree iref = build_fold_indirect_ref_loc (loc,
13185 CALL_EXPR_ARG (arg0, 0));
13186 return fold_build2_loc (loc, code, type, iref,
13187 build_int_cst (TREE_TYPE (iref), 0));
13191 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13192 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13193 if (TREE_CODE (arg0) == RSHIFT_EXPR
13194 && integer_zerop (arg1)
13195 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13197 tree arg00 = TREE_OPERAND (arg0, 0);
13198 tree arg01 = TREE_OPERAND (arg0, 1);
13199 tree itype = TREE_TYPE (arg00);
13200 if (TREE_INT_CST_HIGH (arg01) == 0
13201 && TREE_INT_CST_LOW (arg01)
13202 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13204 if (TYPE_UNSIGNED (itype))
13206 itype = signed_type_for (itype);
13207 arg00 = fold_convert_loc (loc, itype, arg00);
13209 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13210 type, arg00, build_zero_cst (itype));
13214 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13215 if (integer_zerop (arg1)
13216 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13217 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13218 TREE_OPERAND (arg0, 1));
13220 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13221 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13222 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13223 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13224 build_zero_cst (TREE_TYPE (arg0)));
13225 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13226 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13228 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13229 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13230 build_zero_cst (TREE_TYPE (arg0)));
13232 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13233 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13234 && TREE_CODE (arg1) == INTEGER_CST
13235 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13236 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13237 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13238 TREE_OPERAND (arg0, 1), arg1));
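/* Worked example (editor's note, not from the original source):
   "(x ^ 3) == 5" becomes "x == 6", since XOR-ing both sides with 3
   yields x == (3 ^ 5).  */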
13240 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13241 (X & C) == 0 when C is a single bit. */
13242 if (TREE_CODE (arg0) == BIT_AND_EXPR
13243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13244 && integer_zerop (arg1)
13245 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13247 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13248 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13249 TREE_OPERAND (arg0, 1));
13250 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13251 type, tem,
13252 fold_convert_loc (loc, TREE_TYPE (arg0),
13253 arg1));
13256 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13257 constant C is a power of two, i.e. a single bit. */
13258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13260 && integer_zerop (arg1)
13261 && integer_pow2p (TREE_OPERAND (arg0, 1))
13262 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13263 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13265 tree arg00 = TREE_OPERAND (arg0, 0);
13266 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13267 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13270 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13271 when C is a power of two, i.e. a single bit. */
13272 if (TREE_CODE (arg0) == BIT_AND_EXPR
13273 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13274 && integer_zerop (arg1)
13275 && integer_pow2p (TREE_OPERAND (arg0, 1))
13276 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13277 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13279 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13280 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13281 arg000, TREE_OPERAND (arg0, 1));
13282 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13283 tem, build_int_cst (TREE_TYPE (tem), 0));
13286 if (integer_zerop (arg1)
13287 && tree_expr_nonzero_p (arg0))
13289 tree res = constant_boolean_node (code == NE_EXPR, type);
13290 return omit_one_operand_loc (loc, type, res, arg0);
13293 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13294 if (TREE_CODE (arg0) == NEGATE_EXPR
13295 && TREE_CODE (arg1) == NEGATE_EXPR)
13296 return fold_build2_loc (loc, code, type,
13297 TREE_OPERAND (arg0, 0),
13298 fold_convert_loc (loc, TREE_TYPE (arg0),
13299 TREE_OPERAND (arg1, 0)));
13301 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
13302 if (TREE_CODE (arg0) == BIT_AND_EXPR
13303 && TREE_CODE (arg1) == BIT_AND_EXPR)
13305 tree arg00 = TREE_OPERAND (arg0, 0);
13306 tree arg01 = TREE_OPERAND (arg0, 1);
13307 tree arg10 = TREE_OPERAND (arg1, 0);
13308 tree arg11 = TREE_OPERAND (arg1, 1);
13309 tree itype = TREE_TYPE (arg0);
13311 if (operand_equal_p (arg01, arg11, 0))
13312 return fold_build2_loc (loc, code, type,
13313 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13314 fold_build2_loc (loc,
13315 BIT_XOR_EXPR, itype,
13316 arg00, arg10),
13317 arg01),
13318 build_zero_cst (itype));
13320 if (operand_equal_p (arg01, arg10, 0))
13321 return fold_build2_loc (loc, code, type,
13322 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13323 fold_build2_loc (loc,
13324 BIT_XOR_EXPR, itype,
13325 arg00, arg11),
13326 arg01),
13327 build_zero_cst (itype));
13329 if (operand_equal_p (arg00, arg11, 0))
13330 return fold_build2_loc (loc, code, type,
13331 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13332 fold_build2_loc (loc,
13333 BIT_XOR_EXPR, itype,
13334 arg01, arg10),
13335 arg00),
13336 build_zero_cst (itype));
13338 if (operand_equal_p (arg00, arg10, 0))
13339 return fold_build2_loc (loc, code, type,
13340 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13341 fold_build2_loc (loc,
13342 BIT_XOR_EXPR, itype,
13343 arg01, arg11),
13344 arg00),
13345 build_zero_cst (itype));
13348 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13349 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13351 tree arg00 = TREE_OPERAND (arg0, 0);
13352 tree arg01 = TREE_OPERAND (arg0, 1);
13353 tree arg10 = TREE_OPERAND (arg1, 0);
13354 tree arg11 = TREE_OPERAND (arg1, 1);
13355 tree itype = TREE_TYPE (arg0);
13357 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13358 operand_equal_p guarantees no side-effects so we don't need
13359 to use omit_one_operand on Z. */
13360 if (operand_equal_p (arg01, arg11, 0))
13361 return fold_build2_loc (loc, code, type, arg00,
13362 fold_convert_loc (loc, TREE_TYPE (arg00),
13363 arg10));
13364 if (operand_equal_p (arg01, arg10, 0))
13365 return fold_build2_loc (loc, code, type, arg00,
13366 fold_convert_loc (loc, TREE_TYPE (arg00),
13367 arg11));
13368 if (operand_equal_p (arg00, arg11, 0))
13369 return fold_build2_loc (loc, code, type, arg01,
13370 fold_convert_loc (loc, TREE_TYPE (arg01),
13371 arg10));
13372 if (operand_equal_p (arg00, arg10, 0))
13373 return fold_build2_loc (loc, code, type, arg01,
13374 fold_convert_loc (loc, TREE_TYPE (arg01),
13375 arg11));
13377 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13378 if (TREE_CODE (arg01) == INTEGER_CST
13379 && TREE_CODE (arg11) == INTEGER_CST)
13381 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13382 fold_convert_loc (loc, itype, arg11));
13383 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13384 return fold_build2_loc (loc, code, type, tem,
13385 fold_convert_loc (loc, itype, arg10));
13389 /* Attempt to simplify equality/inequality comparisons of complex
13390 values. Only lower the comparison if the result is known or
13391 can be simplified to a single scalar comparison. */
13392 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13393 || TREE_CODE (arg0) == COMPLEX_CST)
13394 && (TREE_CODE (arg1) == COMPLEX_EXPR
13395 || TREE_CODE (arg1) == COMPLEX_CST))
13397 tree real0, imag0, real1, imag1;
13398 tree rcond, icond;
13400 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13402 real0 = TREE_OPERAND (arg0, 0);
13403 imag0 = TREE_OPERAND (arg0, 1);
13405 else
13407 real0 = TREE_REALPART (arg0);
13408 imag0 = TREE_IMAGPART (arg0);
13411 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13413 real1 = TREE_OPERAND (arg1, 0);
13414 imag1 = TREE_OPERAND (arg1, 1);
13416 else
13418 real1 = TREE_REALPART (arg1);
13419 imag1 = TREE_IMAGPART (arg1);
13422 rcond = fold_binary_loc (loc, code, type, real0, real1);
13423 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13425 if (integer_zerop (rcond))
13427 if (code == EQ_EXPR)
13428 return omit_two_operands_loc (loc, type, boolean_false_node,
13429 imag0, imag1);
13430 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13432 else
13434 if (code == NE_EXPR)
13435 return omit_two_operands_loc (loc, type, boolean_true_node,
13436 imag0, imag1);
13437 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13441 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13442 if (icond && TREE_CODE (icond) == INTEGER_CST)
13444 if (integer_zerop (icond))
13446 if (code == EQ_EXPR)
13447 return omit_two_operands_loc (loc, type, boolean_false_node,
13448 real0, real1);
13449 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13451 else
13453 if (code == NE_EXPR)
13454 return omit_two_operands_loc (loc, type, boolean_true_node,
13455 real0, real1);
13456 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13461 return NULL_TREE;
13463 case LT_EXPR:
13464 case GT_EXPR:
13465 case LE_EXPR:
13466 case GE_EXPR:
13467 tem = fold_comparison (loc, code, type, op0, op1);
13468 if (tem != NULL_TREE)
13469 return tem;
13471 /* Transform comparisons of the form X +- C CMP X. */
13472 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13474 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13475 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13476 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13477 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13479 tree arg01 = TREE_OPERAND (arg0, 1);
13480 enum tree_code code0 = TREE_CODE (arg0);
13481 int is_positive;
13483 if (TREE_CODE (arg01) == REAL_CST)
13484 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13485 else
13486 is_positive = tree_int_cst_sgn (arg01);
13488 /* (X - c) > X becomes false. */
13489 if (code == GT_EXPR
13490 && ((code0 == MINUS_EXPR && is_positive >= 0)
13491 || (code0 == PLUS_EXPR && is_positive <= 0)))
13493 if (TREE_CODE (arg01) == INTEGER_CST
13494 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13495 fold_overflow_warning (("assuming signed overflow does not "
13496 "occur when assuming that (X - c) > X "
13497 "is always false"),
13498 WARN_STRICT_OVERFLOW_ALL);
13499 return constant_boolean_node (0, type);
13502 /* Likewise (X + c) < X becomes false. */
13503 if (code == LT_EXPR
13504 && ((code0 == PLUS_EXPR && is_positive >= 0)
13505 || (code0 == MINUS_EXPR && is_positive <= 0)))
13507 if (TREE_CODE (arg01) == INTEGER_CST
13508 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13509 fold_overflow_warning (("assuming signed overflow does not "
13510 "occur when assuming that "
13511 "(X + c) < X is always false"),
13512 WARN_STRICT_OVERFLOW_ALL);
13513 return constant_boolean_node (0, type);
13516 /* Convert (X - c) <= X to true. */
13517 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13518 && code == LE_EXPR
13519 && ((code0 == MINUS_EXPR && is_positive >= 0)
13520 || (code0 == PLUS_EXPR && is_positive <= 0)))
13522 if (TREE_CODE (arg01) == INTEGER_CST
13523 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13524 fold_overflow_warning (("assuming signed overflow does not "
13525 "occur when assuming that "
13526 "(X - c) <= X is always true"),
13527 WARN_STRICT_OVERFLOW_ALL);
13528 return constant_boolean_node (1, type);
13531 /* Convert (X + c) >= X to true. */
13532 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13533 && code == GE_EXPR
13534 && ((code0 == PLUS_EXPR && is_positive >= 0)
13535 || (code0 == MINUS_EXPR && is_positive <= 0)))
13537 if (TREE_CODE (arg01) == INTEGER_CST
13538 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13539 fold_overflow_warning (("assuming signed overflow does not "
13540 "occur when assuming that "
13541 "(X + c) >= X is always true"),
13542 WARN_STRICT_OVERFLOW_ALL);
13543 return constant_boolean_node (1, type);
13546 if (TREE_CODE (arg01) == INTEGER_CST)
13548 /* Convert X + c > X and X - c < X to true for integers. */
13549 if (code == GT_EXPR
13550 && ((code0 == PLUS_EXPR && is_positive > 0)
13551 || (code0 == MINUS_EXPR && is_positive < 0)))
13553 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13554 fold_overflow_warning (("assuming signed overflow does "
13555 "not occur when assuming that "
13556 "(X + c) > X is always true"),
13557 WARN_STRICT_OVERFLOW_ALL);
13558 return constant_boolean_node (1, type);
13561 if (code == LT_EXPR
13562 && ((code0 == MINUS_EXPR && is_positive > 0)
13563 || (code0 == PLUS_EXPR && is_positive < 0)))
13565 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13566 fold_overflow_warning (("assuming signed overflow does "
13567 "not occur when assuming that "
13568 "(X - c) < X is always true"),
13569 WARN_STRICT_OVERFLOW_ALL);
13570 return constant_boolean_node (1, type);
13573 /* Convert X + c <= X and X - c >= X to false for integers. */
13574 if (code == LE_EXPR
13575 && ((code0 == PLUS_EXPR && is_positive > 0)
13576 || (code0 == MINUS_EXPR && is_positive < 0)))
13578 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13579 fold_overflow_warning (("assuming signed overflow does "
13580 "not occur when assuming that "
13581 "(X + c) <= X is always false"),
13582 WARN_STRICT_OVERFLOW_ALL);
13583 return constant_boolean_node (0, type);
13586 if (code == GE_EXPR
13587 && ((code0 == MINUS_EXPR && is_positive > 0)
13588 || (code0 == PLUS_EXPR && is_positive < 0)))
13590 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13591 fold_overflow_warning (("assuming signed overflow does "
13592 "not occur when assuming that "
13593 "(X - c) >= X is always false"),
13594 WARN_STRICT_OVERFLOW_ALL);
13595 return constant_boolean_node (0, type);
13600 /* Comparisons with the highest or lowest possible integer of
13601 the specified precision will have known values. */
13603 tree arg1_type = TREE_TYPE (arg1);
13604 unsigned int width = TYPE_PRECISION (arg1_type);
13606 if (TREE_CODE (arg1) == INTEGER_CST
13607 && width <= HOST_BITS_PER_DOUBLE_INT
13608 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13610 HOST_WIDE_INT signed_max_hi;
13611 unsigned HOST_WIDE_INT signed_max_lo;
13612 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13614 if (width <= HOST_BITS_PER_WIDE_INT)
13616 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13617 - 1;
13618 signed_max_hi = 0;
13619 max_hi = 0;
13621 if (TYPE_UNSIGNED (arg1_type))
13623 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13624 min_lo = 0;
13625 min_hi = 0;
13627 else
13629 max_lo = signed_max_lo;
13630 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13631 min_hi = -1;
13634 else
13636 width -= HOST_BITS_PER_WIDE_INT;
13637 signed_max_lo = -1;
13638 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13639 - 1;
13640 max_lo = -1;
13641 min_lo = 0;
13643 if (TYPE_UNSIGNED (arg1_type))
13645 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13646 min_hi = 0;
13648 else
13650 max_hi = signed_max_hi;
13651 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13655 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13656 && TREE_INT_CST_LOW (arg1) == max_lo)
13657 switch (code)
13659 case GT_EXPR:
13660 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13662 case GE_EXPR:
13663 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13665 case LE_EXPR:
13666 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13668 case LT_EXPR:
13669 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13671 /* The GE_EXPR and LT_EXPR cases above are not normally
13672 reached because of previous transformations. */
13674 default:
13675 break;
13677 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13678 == max_hi
13679 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13680 switch (code)
13682 case GT_EXPR:
13683 arg1 = const_binop (PLUS_EXPR, arg1,
13684 build_int_cst (TREE_TYPE (arg1), 1));
13685 return fold_build2_loc (loc, EQ_EXPR, type,
13686 fold_convert_loc (loc,
13687 TREE_TYPE (arg1), arg0),
13688 arg1);
13689 case LE_EXPR:
13690 arg1 = const_binop (PLUS_EXPR, arg1,
13691 build_int_cst (TREE_TYPE (arg1), 1));
13692 return fold_build2_loc (loc, NE_EXPR, type,
13693 fold_convert_loc (loc, TREE_TYPE (arg1),
13694 arg0),
13695 arg1);
13696 default:
13697 break;
13699 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13700 == min_hi
13701 && TREE_INT_CST_LOW (arg1) == min_lo)
13702 switch (code)
13704 case LT_EXPR:
13705 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13707 case LE_EXPR:
13708 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13710 case GE_EXPR:
13711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13713 case GT_EXPR:
13714 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13716 default:
13717 break;
13719 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13720 == min_hi
13721 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13722 switch (code)
13724 case GE_EXPR:
13725 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13726 return fold_build2_loc (loc, NE_EXPR, type,
13727 fold_convert_loc (loc,
13728 TREE_TYPE (arg1), arg0),
13729 arg1);
13730 case LT_EXPR:
13731 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13732 return fold_build2_loc (loc, EQ_EXPR, type,
13733 fold_convert_loc (loc, TREE_TYPE (arg1),
13734 arg0),
13735 arg1);
13736 default:
13737 break;
13740 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13741 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13742 && TYPE_UNSIGNED (arg1_type)
13743 /* We will flip the signedness of the comparison operator
13744 associated with the mode of arg1, so the sign bit is
13745 specified by this mode. Check that arg1 is the signed
13746 max associated with this sign bit. */
13747 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13748 /* signed_type does not work on pointer types. */
13749 && INTEGRAL_TYPE_P (arg1_type))
13751 /* The following case also applies to X < signed_max+1
13752 and X >= signed_max+1 because of previous transformations. */
13753 if (code == LE_EXPR || code == GT_EXPR)
13755 tree st;
13756 st = signed_type_for (TREE_TYPE (arg1));
13757 return fold_build2_loc (loc,
13758 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13759 type, fold_convert_loc (loc, st, arg0),
13760 build_int_cst (st, 0));
13766 /* If we are comparing an ABS_EXPR with a constant, we can
13767 convert all the cases into explicit comparisons, but they may
13768 well not be faster than doing the ABS and one comparison.
13769 But ABS (X) <= C is a range comparison, which becomes a subtraction
13770 and a comparison, and is probably faster. */
13771 if (code == LE_EXPR
13772 && TREE_CODE (arg1) == INTEGER_CST
13773 && TREE_CODE (arg0) == ABS_EXPR
13774 && ! TREE_SIDE_EFFECTS (arg0)
13775 && (0 != (tem = negate_expr (arg1)))
13776 && TREE_CODE (tem) == INTEGER_CST
13777 && !TREE_OVERFLOW (tem))
13778 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13779 build2 (GE_EXPR, type,
13780 TREE_OPERAND (arg0, 0), tem),
13781 build2 (LE_EXPR, type,
13782 TREE_OPERAND (arg0, 0), arg1));
13784 /* Convert ABS_EXPR<x> >= 0 to true. */
13785 strict_overflow_p = false;
13786 if (code == GE_EXPR
13787 && (integer_zerop (arg1)
13788 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13789 && real_zerop (arg1)))
13790 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13792 if (strict_overflow_p)
13793 fold_overflow_warning (("assuming signed overflow does not occur "
13794 "when simplifying comparison of "
13795 "absolute value and zero"),
13796 WARN_STRICT_OVERFLOW_CONDITIONAL);
13797 return omit_one_operand_loc (loc, type,
13798 constant_boolean_node (true, type),
13799 arg0);
13802 /* Convert ABS_EXPR<x> < 0 to false. */
13803 strict_overflow_p = false;
13804 if (code == LT_EXPR
13805 && (integer_zerop (arg1) || real_zerop (arg1))
13806 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13808 if (strict_overflow_p)
13809 fold_overflow_warning (("assuming signed overflow does not occur "
13810 "when simplifying comparison of "
13811 "absolute value and zero"),
13812 WARN_STRICT_OVERFLOW_CONDITIONAL);
13813 return omit_one_operand_loc (loc, type,
13814 constant_boolean_node (false, type),
13815 arg0);
13818 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13819 and similarly for >= into !=. */
13820 if ((code == LT_EXPR || code == GE_EXPR)
13821 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13822 && TREE_CODE (arg1) == LSHIFT_EXPR
13823 && integer_onep (TREE_OPERAND (arg1, 0)))
13824 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13825 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13826 TREE_OPERAND (arg1, 1)),
13827 build_zero_cst (TREE_TYPE (arg0)));
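/* Worked example (editor's note, not from the original source): for
   unsigned x, "x < (1 << y)" holds exactly when every bit of x at
   position >= y is clear, i.e. "(x >> y) == 0"; likewise
   "x >= (1 << y)" becomes "(x >> y) != 0".  */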
13829 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13830 otherwise Y might be >= # of bits in X's type and thus e.g.
13831 (unsigned char) (1 << Y) for Y == 15 might be 0.
13832 If the cast is widening, then 1 << Y should have unsigned type,
13833 otherwise if Y is the number of bits in the signed shift type minus 1,
13834 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13835 Y == 31 might be 0xffffffff80000000. */
13836 if ((code == LT_EXPR || code == GE_EXPR)
13837 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13838 && CONVERT_EXPR_P (arg1)
13839 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13840 && (TYPE_PRECISION (TREE_TYPE (arg1))
13841 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13842 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13843 || (TYPE_PRECISION (TREE_TYPE (arg1))
13844 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13845 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13847 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13848 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13849 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13850 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13851 build_zero_cst (TREE_TYPE (arg0)));
13854 return NULL_TREE;
13856 case UNORDERED_EXPR:
13857 case ORDERED_EXPR:
13858 case UNLT_EXPR:
13859 case UNLE_EXPR:
13860 case UNGT_EXPR:
13861 case UNGE_EXPR:
13862 case UNEQ_EXPR:
13863 case LTGT_EXPR:
13864 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13866 t1 = fold_relational_const (code, type, arg0, arg1);
13867 if (t1 != NULL_TREE)
13868 return t1;
13871 /* If the first operand is NaN, the result is constant. */
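       /* Editorial note: with a NaN operand, UNORDERED_EXPR, UNLT_EXPR,
          UNLE_EXPR, UNGT_EXPR, UNGE_EXPR and UNEQ_EXPR are all true,
          while ORDERED_EXPR and LTGT_EXPR are false; hence t1 below is
          zero for the latter two codes and one otherwise.  */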
13872 if (TREE_CODE (arg0) == REAL_CST
13873 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13874 && (code != LTGT_EXPR || ! flag_trapping_math))
13876 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13877 ? integer_zero_node
13878 : integer_one_node;
13879 return omit_one_operand_loc (loc, type, t1, arg1);
13882 /* If the second operand is NaN, the result is constant. */
13883 if (TREE_CODE (arg1) == REAL_CST
13884 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13885 && (code != LTGT_EXPR || ! flag_trapping_math))
13887 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13888 ? integer_zero_node
13889 : integer_one_node;
13890 return omit_one_operand_loc (loc, type, t1, arg0);
13893 /* Simplify unordered comparison of something with itself. */
13894 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13895 && operand_equal_p (arg0, arg1, 0))
13896 return constant_boolean_node (1, type);
13898 if (code == LTGT_EXPR
13899 && !flag_trapping_math
13900 && operand_equal_p (arg0, arg1, 0))
13901 return constant_boolean_node (0, type);
13903 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
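       /* Editorial note: the extension of a float to double is exact,
          so e.g. "(double) f1 < (double) f2" with float f1, f2 gives
          the same result as "f1 < f2" and can be compared in the
          narrower common type.  */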
13905 tree targ0 = strip_float_extensions (arg0);
13906 tree targ1 = strip_float_extensions (arg1);
13907 tree newtype = TREE_TYPE (targ0);
13909 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13910 newtype = TREE_TYPE (targ1);
13912 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13913 return fold_build2_loc (loc, code, type,
13914 fold_convert_loc (loc, newtype, targ0),
13915 fold_convert_loc (loc, newtype, targ1));
13918 return NULL_TREE;
13920 case COMPOUND_EXPR:
13921 /* When pedantic, a compound expression can be neither an lvalue
13922 nor an integer constant expression. */
13923 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13924 return NULL_TREE;
13926 /* Don't let (0, 0) be a null pointer constant. */
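       /* Editorial note: wrapping the zero in a NOP_EXPR keeps the
          folded result from being a bare INTEGER_CST zero, which the
          C front ends would otherwise accept as a null pointer
          constant.  */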
13926 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13927 : fold_convert_loc (loc, type, arg1);
13928 return pedantic_non_lvalue_loc (loc, tem);
13930 case COMPLEX_EXPR:
13931 if ((TREE_CODE (arg0) == REAL_CST
13932 && TREE_CODE (arg1) == REAL_CST)
13933 || (TREE_CODE (arg0) == INTEGER_CST
13934 && TREE_CODE (arg1) == INTEGER_CST))
13935 return build_complex (type, arg0, arg1);
13936 if (TREE_CODE (arg0) == REALPART_EXPR
13937 && TREE_CODE (arg1) == IMAGPART_EXPR
13938 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13939 && operand_equal_p (TREE_OPERAND (arg0, 0),
13940 TREE_OPERAND (arg1, 0), 0))
13941 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13942 TREE_OPERAND (arg1, 0));
13943 return NULL_TREE;
13945 case ASSERT_EXPR:
13946 /* An ASSERT_EXPR should never be passed to fold_binary. */
13947 gcc_unreachable ();
13949 case VEC_PACK_TRUNC_EXPR:
13950 case VEC_PACK_FIX_TRUNC_EXPR:
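       /* Editorial illustration: packing two V2SI constants { 1, 2 }
          and { 3, 4 } with VEC_PACK_TRUNC_EXPR into V4HI concatenates
          the element lists and truncates each element, yielding the
          V4HI constant { 1, 2, 3, 4 }.  */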
13952 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13953 tree *elts;
13955 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13956 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13957 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13958 return NULL_TREE;
13960 elts = XALLOCAVEC (tree, nelts);
13961 if (!vec_cst_ctor_to_array (arg0, elts)
13962 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13963 return NULL_TREE;
13965 for (i = 0; i < nelts; i++)
13967 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13968 ? NOP_EXPR : FIX_TRUNC_EXPR,
13969 TREE_TYPE (type), elts[i]);
13970 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13971 return NULL_TREE;
13974 return build_vector (type, elts);
13977 case VEC_WIDEN_MULT_LO_EXPR:
13978 case VEC_WIDEN_MULT_HI_EXPR:
13979 case VEC_WIDEN_MULT_EVEN_EXPR:
13980 case VEC_WIDEN_MULT_ODD_EXPR:
13982 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13983 unsigned int out, ofs, scale;
13984 tree *elts;
13986 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13987 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13988 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13989 return NULL_TREE;
13991 elts = XALLOCAVEC (tree, nelts * 4);
13992 if (!vec_cst_ctor_to_array (arg0, elts)
13993 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13994 return NULL_TREE;
13996 if (code == VEC_WIDEN_MULT_LO_EXPR)
13997 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13998 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13999 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14000 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14001 scale = 1, ofs = 0;
14002 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14003 scale = 1, ofs = 1;
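       /* Editorial illustration: with nelts == 2 output elements and
          VEC_WIDEN_MULT_EVEN_EXPR, scale == 1 and ofs == 0, so output
          element OUT is the widened product of the inputs' elements
          at index 2 * OUT (elements 0 and 2 of each V4SI operand).  */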
14005 for (out = 0; out < nelts; out++)
14007 unsigned int in1 = (out << scale) + ofs;
14008 unsigned int in2 = in1 + nelts * 2;
14009 tree t1, t2;
14011 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14012 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14014 if (t1 == NULL_TREE || t2 == NULL_TREE)
14015 return NULL_TREE;
14016 elts[out] = const_binop (MULT_EXPR, t1, t2);
14017 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14018 return NULL_TREE;
14021 return build_vector (type, elts);
14024 default:
14025 return NULL_TREE;
14026 } /* switch (code) */
14029 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14030 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14031 of GOTO_EXPR. */
14033 static tree
14034 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14036 switch (TREE_CODE (*tp))
14038 case LABEL_EXPR:
14039 return *tp;
14041 case GOTO_EXPR:
14042 *walk_subtrees = 0;
14044 /* ... fall through ... */
14046 default:
14047 return NULL_TREE;
14051 /* Return whether the sub-tree ST contains a label which is accessible from
14052 outside the sub-tree. */
14054 static bool
14055 contains_label_p (tree st)
14057 return
14058 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14061 /* Fold a ternary expression of code CODE and type TYPE with operands
14062 OP0, OP1, and OP2. Return the folded expression if folding is
14063 successful. Otherwise, return NULL_TREE. */
14065 tree
14066 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14067 tree op0, tree op1, tree op2)
14069 tree tem;
14070 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14071 enum tree_code_class kind = TREE_CODE_CLASS (code);
14073 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14074 && TREE_CODE_LENGTH (code) == 3);
14076 /* Strip any conversions that don't change the mode. This is safe
14077 for every expression, except for a comparison expression because
14078 its signedness is derived from its operands. So, in the latter
14079 case, only strip conversions that don't change the signedness.
14081 Note that this is done as an internal manipulation within the
14082 constant folder, in order to find the simplest representation of
14083 the arguments so that their form can be studied. In any case,
14084 the appropriate type conversions should be put back in the tree
14085 that will get out of the constant folder. */
14086 if (op0)
14088 arg0 = op0;
14089 STRIP_NOPS (arg0);
14092 if (op1)
14094 arg1 = op1;
14095 STRIP_NOPS (arg1);
14098 if (op2)
14100 arg2 = op2;
14101 STRIP_NOPS (arg2);
14104 switch (code)
14106 case COMPONENT_REF:
14107 if (TREE_CODE (arg0) == CONSTRUCTOR
14108 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14110 unsigned HOST_WIDE_INT idx;
14111 tree field, value;
14112 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14113 if (field == arg1)
14114 return value;
14116 return NULL_TREE;
14118 case COND_EXPR:
14119 case VEC_COND_EXPR:
14120 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14121 so all simple results must be passed through pedantic_non_lvalue. */
14122 if (TREE_CODE (arg0) == INTEGER_CST)
14124 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14125 tem = integer_zerop (arg0) ? op2 : op1;
14126 /* Only optimize constant conditions when the selected branch
14127 has the same type as the COND_EXPR. This avoids optimizing
14128 away "c ? x : throw", where the throw has a void type.
14129 Avoid throwing away an operand which contains a label. */
14130 if ((!TREE_SIDE_EFFECTS (unused_op)
14131 || !contains_label_p (unused_op))
14132 && (! VOID_TYPE_P (TREE_TYPE (tem))
14133 || VOID_TYPE_P (type)))
14134 return pedantic_non_lvalue_loc (loc, tem);
14135 return NULL_TREE;
14137 else if (TREE_CODE (arg0) == VECTOR_CST)
14139 if (integer_all_onesp (arg0))
14140 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14141 if (integer_zerop (arg0))
14142 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14144 if ((TREE_CODE (arg1) == VECTOR_CST
14145 || TREE_CODE (arg1) == CONSTRUCTOR)
14146 && (TREE_CODE (arg2) == VECTOR_CST
14147 || TREE_CODE (arg2) == CONSTRUCTOR))
14149 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14150 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14151 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14152 for (i = 0; i < nelts; i++)
14154 tree val = VECTOR_CST_ELT (arg0, i);
14155 if (integer_all_onesp (val))
14156 sel[i] = i;
14157 else if (integer_zerop (val))
14158 sel[i] = nelts + i;
14159 else /* Currently unreachable. */
14160 return NULL_TREE;
14162 tree t = fold_vec_perm (type, arg1, arg2, sel);
14163 if (t != NULL_TREE)
14164 return t;
14168 if (operand_equal_p (arg1, op2, 0))
14169 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14171 /* If we have A op B ? A : C, we may be able to convert this to a
14172 simpler expression, depending on the operation and the values
14173 of B and C. Signed zeros prevent all of these transformations,
14174 for reasons given above each one.
14176 Also try swapping the arguments and inverting the conditional. */
14177 if (COMPARISON_CLASS_P (arg0)
14178 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14179 arg1, TREE_OPERAND (arg0, 1))
14180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14182 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14183 if (tem)
14184 return tem;
14187 if (COMPARISON_CLASS_P (arg0)
14188 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14189 op2,
14190 TREE_OPERAND (arg0, 1))
14191 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14193 location_t loc0 = expr_location_or (arg0, loc);
14194 tem = fold_invert_truthvalue (loc0, arg0);
14195 if (tem && COMPARISON_CLASS_P (tem))
14197 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14198 if (tem)
14199 return tem;
14203 /* If the second operand is simpler than the third, swap them
14204 since that produces better jump optimization results. */
14205 if (truth_value_p (TREE_CODE (arg0))
14206 && tree_swap_operands_p (op1, op2, false))
14208 location_t loc0 = expr_location_or (arg0, loc);
14209 /* See if this can be inverted. If it can't, possibly because
14210 it was a floating-point inequality comparison, don't do
14211 anything. */
14212 tem = fold_invert_truthvalue (loc0, arg0);
14213 if (tem)
14214 return fold_build3_loc (loc, code, type, tem, op2, op1);
14217 /* Convert A ? 1 : 0 to simply A. */
14218 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14219 : (integer_onep (op1)
14220 && !VECTOR_TYPE_P (type)))
14221 && integer_zerop (op2)
14222 /* If we try to convert OP0 to our type, the
14223 call to fold will try to move the conversion inside
14224 a COND, which will recurse. In that case, the COND_EXPR
14225 is probably the best choice, so leave it alone. */
14226 && type == TREE_TYPE (arg0))
14227 return pedantic_non_lvalue_loc (loc, arg0);
14229 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14230 over COND_EXPR in cases such as floating point comparisons. */
14231 if (integer_zerop (op1)
14232 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14233 : (integer_onep (op2)
14234 && !VECTOR_TYPE_P (type)))
14235 && truth_value_p (TREE_CODE (arg0)))
14236 return pedantic_non_lvalue_loc (loc,
14237 fold_convert_loc (loc, type,
14238 invert_truthvalue_loc (loc,
14239 arg0)));
14241 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
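       /* Editorial illustration: for an 8-bit signed A the sign bit
          constant is 0x80, and "A < 0 ? 0x80 : 0" equals "A & 0x80"
          for every A; the precision checks below handle sign bit
          constants wider than A itself.  */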
14242 if (TREE_CODE (arg0) == LT_EXPR
14243 && integer_zerop (TREE_OPERAND (arg0, 1))
14244 && integer_zerop (op2)
14245 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14247 /* sign_bit_p looks through both zero and sign extensions,
14248 but for this optimization only sign extensions are
14249 usable. */
14250 tree tem2 = TREE_OPERAND (arg0, 0);
14251 while (tem != tem2)
14253 if (TREE_CODE (tem2) != NOP_EXPR
14254 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14256 tem = NULL_TREE;
14257 break;
14259 tem2 = TREE_OPERAND (tem2, 0);
14261 /* sign_bit_p only checks ARG1 bits within A's precision.
14262 If <sign bit of A> has wider type than A, bits outside
14263 of A's precision in <sign bit of A> need to be checked.
14264 If they are all 0, this optimization needs to be done
14265 in unsigned A's type, if they are all 1 in signed A's type,
14266 otherwise this can't be done. */
14267 if (tem
14268 && TYPE_PRECISION (TREE_TYPE (tem))
14269 < TYPE_PRECISION (TREE_TYPE (arg1))
14270 && TYPE_PRECISION (TREE_TYPE (tem))
14271 < TYPE_PRECISION (type))
14273 unsigned HOST_WIDE_INT mask_lo;
14274 HOST_WIDE_INT mask_hi;
14275 int inner_width, outer_width;
14276 tree tem_type;
14278 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14279 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14280 if (outer_width > TYPE_PRECISION (type))
14281 outer_width = TYPE_PRECISION (type);
14283 if (outer_width > HOST_BITS_PER_WIDE_INT)
14285 mask_hi = (HOST_WIDE_INT_M1U
14286 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14287 mask_lo = -1;
14289 else
14291 mask_hi = 0;
14292 mask_lo = (HOST_WIDE_INT_M1U
14293 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14295 if (inner_width > HOST_BITS_PER_WIDE_INT)
14297 mask_hi &= ~(HOST_WIDE_INT_M1U
14298 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14299 mask_lo = 0;
14301 else
14302 mask_lo &= ~(HOST_WIDE_INT_M1U
14303 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14305 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14306 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14308 tem_type = signed_type_for (TREE_TYPE (tem));
14309 tem = fold_convert_loc (loc, tem_type, tem);
14311 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14312 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14314 tem_type = unsigned_type_for (TREE_TYPE (tem));
14315 tem = fold_convert_loc (loc, tem_type, tem);
14317 else
14318 tem = NULL;
14321 if (tem)
14322 return
14323 fold_convert_loc (loc, type,
14324 fold_build2_loc (loc, BIT_AND_EXPR,
14325 TREE_TYPE (tem), tem,
14326 fold_convert_loc (loc,
14327 TREE_TYPE (tem),
14328 arg1)));
14331 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14332 already handled above. */
14333 if (TREE_CODE (arg0) == BIT_AND_EXPR
14334 && integer_onep (TREE_OPERAND (arg0, 1))
14335 && integer_zerop (op2)
14336 && integer_pow2p (arg1))
14338 tree tem = TREE_OPERAND (arg0, 0);
14339 STRIP_NOPS (tem);
14340 if (TREE_CODE (tem) == RSHIFT_EXPR
14341 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14342 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14343 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14344 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14345 TREE_OPERAND (tem, 0), arg1);
14348 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14349 is probably obsolete because the first operand should be a
14350 truth value (that's why we have the two cases above), but let's
14351 leave it in until we can confirm this for all front-ends. */
14352 if (integer_zerop (op2)
14353 && TREE_CODE (arg0) == NE_EXPR
14354 && integer_zerop (TREE_OPERAND (arg0, 1))
14355 && integer_pow2p (arg1)
14356 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14357 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14358 arg1, OEP_ONLY_CONST))
14359 return pedantic_non_lvalue_loc (loc,
14360 fold_convert_loc (loc, type,
14361 TREE_OPERAND (arg0, 0)));
14363 /* Disable the transformations below for vectors, since
14364 fold_binary_op_with_conditional_arg may undo them immediately,
14365 yielding an infinite loop. */
14366 if (code == VEC_COND_EXPR)
14367 return NULL_TREE;
14369 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14370 if (integer_zerop (op2)
14371 && truth_value_p (TREE_CODE (arg0))
14372 && truth_value_p (TREE_CODE (arg1))
14373 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14374 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14375 : TRUTH_ANDIF_EXPR,
14376 type, fold_convert_loc (loc, type, arg0), arg1);
14378 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14379 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14380 && truth_value_p (TREE_CODE (arg0))
14381 && truth_value_p (TREE_CODE (arg1))
14382 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14384 location_t loc0 = expr_location_or (arg0, loc);
14385 /* Only perform transformation if ARG0 is easily inverted. */
14386 tem = fold_invert_truthvalue (loc0, arg0);
14387 if (tem)
14388 return fold_build2_loc (loc, code == VEC_COND_EXPR
14389 ? BIT_IOR_EXPR
14390 : TRUTH_ORIF_EXPR,
14391 type, fold_convert_loc (loc, type, tem),
14392 arg1);
14395 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14396 if (integer_zerop (arg1)
14397 && truth_value_p (TREE_CODE (arg0))
14398 && truth_value_p (TREE_CODE (op2))
14399 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14401 location_t loc0 = expr_location_or (arg0, loc);
14402 /* Only perform transformation if ARG0 is easily inverted. */
14403 tem = fold_invert_truthvalue (loc0, arg0);
14404 if (tem)
14405 return fold_build2_loc (loc, code == VEC_COND_EXPR
14406 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14407 type, fold_convert_loc (loc, type, tem),
14408 op2);
14411 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14412 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14413 && truth_value_p (TREE_CODE (arg0))
14414 && truth_value_p (TREE_CODE (op2))
14415 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14416 return fold_build2_loc (loc, code == VEC_COND_EXPR
14417 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14418 type, fold_convert_loc (loc, type, arg0), op2);
14420 return NULL_TREE;
14422 case CALL_EXPR:
14423 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14424 of fold_ternary on them. */
14425 gcc_unreachable ();
14427 case BIT_FIELD_REF:
14428 if ((TREE_CODE (arg0) == VECTOR_CST
14429 || (TREE_CODE (arg0) == CONSTRUCTOR
14430 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14431 && (type == TREE_TYPE (TREE_TYPE (arg0))
14432 || (TREE_CODE (type) == VECTOR_TYPE
14433 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14435 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14436 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14437 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14438 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14440 if (n != 0
14441 && (idx % width) == 0
14442 && (n % width) == 0
14443 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14445 idx = idx / width;
14446 n = n / width;
14448 if (TREE_CODE (arg0) == VECTOR_CST)
14450 if (n == 1)
14451 return VECTOR_CST_ELT (arg0, idx);
14453 tree *vals = XALLOCAVEC (tree, n);
14454 for (unsigned i = 0; i < n; ++i)
14455 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14456 return build_vector (type, vals);
14459 /* Constructor elements can be subvectors. */
14460 unsigned HOST_WIDE_INT k = 1;
14461 if (CONSTRUCTOR_NELTS (arg0) != 0)
14463 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14464 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14465 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14468 /* We keep an exact subset of the constructor elements. */
14469 if ((idx % k) == 0 && (n % k) == 0)
14471 if (CONSTRUCTOR_NELTS (arg0) == 0)
14472 return build_constructor (type, NULL);
14473 idx /= k;
14474 n /= k;
14475 if (n == 1)
14477 if (idx < CONSTRUCTOR_NELTS (arg0))
14478 return CONSTRUCTOR_ELT (arg0, idx)->value;
14479 return build_zero_cst (type);
14482 vec<constructor_elt, va_gc> *vals;
14483 vec_alloc (vals, n);
14484 for (unsigned i = 0;
14485 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14486 ++i)
14487 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14488 CONSTRUCTOR_ELT
14489 (arg0, idx + i)->value);
14490 return build_constructor (type, vals);
14492 /* The bitfield references a single constructor element. */
14493 else if (idx + n <= (idx / k + 1) * k)
14495 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14496 return build_zero_cst (type);
14497 else if (n == k)
14498 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14499 else
14500 return fold_build3_loc (loc, code, type,
14501 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14502 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14507 /* A bit-field-ref that referenced the full argument can be stripped. */
14508 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14509 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14510 && integer_zerop (op2))
14511 return fold_convert_loc (loc, type, arg0);
14513 /* On constants we can use native encode/interpret to constant
14514 fold (nearly) all BIT_FIELD_REFs. */
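       /* Editorial illustration: a BIT_FIELD_REF selecting bits
          32..63 of a constant is folded by serializing the constant
          into a byte buffer with native_encode_expr and re-reading
          bytes 4..7 with native_interpret_expr; both the offset and
          the size must be byte-aligned for this path.  */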
14515 if (CONSTANT_CLASS_P (arg0)
14516 && can_native_interpret_type_p (type)
14517 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14518 /* This limitation should not be necessary; we just need to
14519 round this up to the mode size. */
14520 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14521 /* Need bit-shifting of the buffer to relax the following. */
14522 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14524 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14525 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14526 unsigned HOST_WIDE_INT clen;
14527 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14528 /* ??? We cannot tell native_encode_expr to start at
14529 an arbitrary byte only, so limit ourselves to a reasonable
14530 amount of work. */
14531 if (clen <= 4096)
14533 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14534 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14535 if (len > 0
14536 && len * BITS_PER_UNIT >= bitpos + bitsize)
14538 tree v = native_interpret_expr (type,
14539 b + bitpos / BITS_PER_UNIT,
14540 bitsize / BITS_PER_UNIT);
14541 if (v)
14542 return v;
14547 return NULL_TREE;
14549 case FMA_EXPR:
14550 /* For integers we can decompose the FMA if possible. */
14551 if (TREE_CODE (arg0) == INTEGER_CST
14552 && TREE_CODE (arg1) == INTEGER_CST)
14553 return fold_build2_loc (loc, PLUS_EXPR, type,
14554 const_binop (MULT_EXPR, arg0, arg1), arg2);
14555 if (integer_zerop (arg2))
14556 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14558 return fold_fma (loc, type, arg0, arg1, arg2);
14560 case VEC_PERM_EXPR:
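       /* Editorial illustration: VEC_PERM_EXPR <v, v, { 4, 5, 6, 7 }>
          with nelts == 4 and op0 == op1 masks each selector with
          nelts - 1, giving { 0, 1, 2, 3 }; every element then comes
          from vector 0 and the selection is the identity, so the
          expression folds to op0 below.  */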
14561 if (TREE_CODE (arg2) == VECTOR_CST)
14563 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14564 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14565 tree t;
14566 bool need_mask_canon = false;
14567 bool all_in_vec0 = true;
14568 bool all_in_vec1 = true;
14569 bool maybe_identity = true;
14570 bool single_arg = (op0 == op1);
14571 bool changed = false;
14573 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14574 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14575 for (i = 0; i < nelts; i++)
14577 tree val = VECTOR_CST_ELT (arg2, i);
14578 if (TREE_CODE (val) != INTEGER_CST)
14579 return NULL_TREE;
14581 sel[i] = TREE_INT_CST_LOW (val) & mask;
14582 if (TREE_INT_CST_HIGH (val)
14583 || ((unsigned HOST_WIDE_INT)
14584 TREE_INT_CST_LOW (val) != sel[i]))
14585 need_mask_canon = true;
14587 if (sel[i] < nelts)
14588 all_in_vec1 = false;
14589 else
14590 all_in_vec0 = false;
14592 if ((sel[i] & (nelts-1)) != i)
14593 maybe_identity = false;
14596 if (maybe_identity)
14598 if (all_in_vec0)
14599 return op0;
14600 if (all_in_vec1)
14601 return op1;
14604 if (all_in_vec0)
14605 op1 = op0;
14606 else if (all_in_vec1)
14608 op0 = op1;
14609 for (i = 0; i < nelts; i++)
14610 sel[i] -= nelts;
14611 need_mask_canon = true;
14614 if ((TREE_CODE (op0) == VECTOR_CST
14615 || TREE_CODE (op0) == CONSTRUCTOR)
14616 && (TREE_CODE (op1) == VECTOR_CST
14617 || TREE_CODE (op1) == CONSTRUCTOR))
14619 t = fold_vec_perm (type, op0, op1, sel);
14620 if (t != NULL_TREE)
14621 return t;
14624 if (op0 == op1 && !single_arg)
14625 changed = true;
14627 if (need_mask_canon && arg2 == op2)
14629 tree *tsel = XALLOCAVEC (tree, nelts);
14630 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14631 for (i = 0; i < nelts; i++)
14632 tsel[i] = build_int_cst (eltype, sel[i]);
14633 op2 = build_vector (TREE_TYPE (arg2), tsel);
14634 changed = true;
14637 if (changed)
14638 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14640 return NULL_TREE;
14642 default:
14643 return NULL_TREE;
14644 } /* switch (code) */
14647 /* Perform constant folding and related simplification of EXPR.
14648 The related simplifications include x*1 => x, x*0 => 0, etc.,
14649 and application of the associative law.
14650 NOP_EXPR conversions may be removed freely (as long as we
14651 are careful not to change the type of the overall expression).
14652 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14653 but we can constant-fold them if they have constant operands. */
14655 #ifdef ENABLE_FOLD_CHECKING
14656 # define fold(x) fold_1 (x)
14657 static tree fold_1 (tree);
14658 static
14659 #endif
14660 tree
14661 fold (tree expr)
14663 const tree t = expr;
14664 enum tree_code code = TREE_CODE (t);
14665 enum tree_code_class kind = TREE_CODE_CLASS (code);
14666 tree tem;
14667 location_t loc = EXPR_LOCATION (expr);
14669 /* Return right away if a constant. */
14670 if (kind == tcc_constant)
14671 return t;
14673 /* CALL_EXPR-like objects with variable numbers of operands are
14674 treated specially. */
14675 if (kind == tcc_vl_exp)
14677 if (code == CALL_EXPR)
14679 tem = fold_call_expr (loc, expr, false);
14680 return tem ? tem : expr;
14682 return expr;
14685 if (IS_EXPR_CODE_CLASS (kind))
14687 tree type = TREE_TYPE (t);
14688 tree op0, op1, op2;
14690 switch (TREE_CODE_LENGTH (code))
14692 case 1:
14693 op0 = TREE_OPERAND (t, 0);
14694 tem = fold_unary_loc (loc, code, type, op0);
14695 return tem ? tem : expr;
14696 case 2:
14697 op0 = TREE_OPERAND (t, 0);
14698 op1 = TREE_OPERAND (t, 1);
14699 tem = fold_binary_loc (loc, code, type, op0, op1);
14700 return tem ? tem : expr;
14701 case 3:
14702 op0 = TREE_OPERAND (t, 0);
14703 op1 = TREE_OPERAND (t, 1);
14704 op2 = TREE_OPERAND (t, 2);
14705 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14706 return tem ? tem : expr;
14707 default:
14708 break;
14712 switch (code)
14714 case ARRAY_REF:
14716 tree op0 = TREE_OPERAND (t, 0);
14717 tree op1 = TREE_OPERAND (t, 1);
14719 if (TREE_CODE (op1) == INTEGER_CST
14720 && TREE_CODE (op0) == CONSTRUCTOR
14721 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14723 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14724 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14725 unsigned HOST_WIDE_INT begin = 0;
14727 /* Find a matching index by means of a binary search. */
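       /* Editorial note: this relies on the constructor's index list
          being sorted in ascending order, with a RANGE_EXPR compared
          via its low and high bounds; e.g. looking up index 5 in
          { [0..3]: a, 4: b, 5: c } narrows to the third element and
          returns c.  */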
14728 while (begin != end)
14730 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14731 tree index = (*elts)[middle].index;
14733 if (TREE_CODE (index) == INTEGER_CST
14734 && tree_int_cst_lt (index, op1))
14735 begin = middle + 1;
14736 else if (TREE_CODE (index) == INTEGER_CST
14737 && tree_int_cst_lt (op1, index))
14738 end = middle;
14739 else if (TREE_CODE (index) == RANGE_EXPR
14740 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14741 begin = middle + 1;
14742 else if (TREE_CODE (index) == RANGE_EXPR
14743 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14744 end = middle;
14745 else
14746 return (*elts)[middle].value;
14750 return t;
14753 /* Return a VECTOR_CST if possible. */
14754 case CONSTRUCTOR:
14756 tree type = TREE_TYPE (t);
14757 if (TREE_CODE (type) != VECTOR_TYPE)
14758 return t;
14760 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14761 unsigned HOST_WIDE_INT idx, pos = 0;
14762 tree value;
14764 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14766 if (!CONSTANT_CLASS_P (value))
14767 return t;
14768 if (TREE_CODE (value) == VECTOR_CST)
14770 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14771 vec[pos++] = VECTOR_CST_ELT (value, i);
14773 else
14774 vec[pos++] = value;
14776 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14777 vec[pos] = build_zero_cst (TREE_TYPE (type));
14779 return build_vector (type, vec);
14782 case CONST_DECL:
14783 return fold (DECL_INITIAL (t));
14785 default:
14786 return t;
14787 } /* switch (code) */
14790 #ifdef ENABLE_FOLD_CHECKING
14791 #undef fold
14793 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14794 hash_table <pointer_hash <tree_node> >);
14795 static void fold_check_failed (const_tree, const_tree);
14796 void print_fold_checksum (const_tree);
14798 /* When --enable-checking=fold, compute a digest of expr before
14799 and after the actual fold call to verify that fold did not
14800 accidentally change the original expr. */
14802 tree
14803 fold (tree expr)
14805 tree ret;
14806 struct md5_ctx ctx;
14807 unsigned char checksum_before[16], checksum_after[16];
14808 hash_table <pointer_hash <tree_node> > ht;
14810 ht.create (32);
14811 md5_init_ctx (&ctx);
14812 fold_checksum_tree (expr, &ctx, ht);
14813 md5_finish_ctx (&ctx, checksum_before);
14814 ht.empty ();
14816 ret = fold_1 (expr);
14818 md5_init_ctx (&ctx);
14819 fold_checksum_tree (expr, &ctx, ht);
14820 md5_finish_ctx (&ctx, checksum_after);
14821 ht.dispose ();
14823 if (memcmp (checksum_before, checksum_after, 16))
14824 fold_check_failed (expr, ret);
14826 return ret;
14829 void
14830 print_fold_checksum (const_tree expr)
14832 struct md5_ctx ctx;
14833 unsigned char checksum[16], cnt;
14834 hash_table <pointer_hash <tree_node> > ht;
14836 ht.create (32);
14837 md5_init_ctx (&ctx);
14838 fold_checksum_tree (expr, &ctx, ht);
14839 md5_finish_ctx (&ctx, checksum);
14840 ht.dispose ();
14841 for (cnt = 0; cnt < 16; ++cnt)
14842 fprintf (stderr, "%02x", checksum[cnt]);
14843 putc ('\n', stderr);
14846 static void
14847 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14849 internal_error ("fold check: original tree changed by fold");
14852 static void
14853 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14854 hash_table <pointer_hash <tree_node> > ht)
14856 tree_node **slot;
14857 enum tree_code code;
14858 union tree_node buf;
14859 int i, len;
14861 recursive_label:
14862 if (expr == NULL)
14863 return;
14864 slot = ht.find_slot (expr, INSERT);
14865 if (*slot != NULL)
14866 return;
14867 *slot = CONST_CAST_TREE (expr);
14868 code = TREE_CODE (expr);
14869 if (TREE_CODE_CLASS (code) == tcc_declaration
14870 && DECL_ASSEMBLER_NAME_SET_P (expr))
14872 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14873 memcpy ((char *) &buf, expr, tree_size (expr));
14874 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14875 expr = (tree) &buf;
14877 else if (TREE_CODE_CLASS (code) == tcc_type
14878 && (TYPE_POINTER_TO (expr)
14879 || TYPE_REFERENCE_TO (expr)
14880 || TYPE_CACHED_VALUES_P (expr)
14881 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14882 || TYPE_NEXT_VARIANT (expr)))
14884 /* Allow these fields to be modified. */
14885 tree tmp;
14886 memcpy ((char *) &buf, expr, tree_size (expr));
14887 expr = tmp = (tree) &buf;
14888 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14889 TYPE_POINTER_TO (tmp) = NULL;
14890 TYPE_REFERENCE_TO (tmp) = NULL;
14891 TYPE_NEXT_VARIANT (tmp) = NULL;
14892 if (TYPE_CACHED_VALUES_P (tmp))
14894 TYPE_CACHED_VALUES_P (tmp) = 0;
14895 TYPE_CACHED_VALUES (tmp) = NULL;
14898 md5_process_bytes (expr, tree_size (expr), ctx);
14899 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14900 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14901 if (TREE_CODE_CLASS (code) != tcc_type
14902 && TREE_CODE_CLASS (code) != tcc_declaration
14903 && code != TREE_LIST
14904 && code != SSA_NAME
14905 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14906 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14907 switch (TREE_CODE_CLASS (code))
14909 case tcc_constant:
14910 switch (code)
14912 case STRING_CST:
14913 md5_process_bytes (TREE_STRING_POINTER (expr),
14914 TREE_STRING_LENGTH (expr), ctx);
14915 break;
14916 case COMPLEX_CST:
14917 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14918 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14919 break;
14920 case VECTOR_CST:
14921 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14922 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14923 break;
14924 default:
14925 break;
14927 break;
14928 case tcc_exceptional:
14929 switch (code)
14931 case TREE_LIST:
14932 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14933 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14934 expr = TREE_CHAIN (expr);
14935 goto recursive_label;
14936 break;
14937 case TREE_VEC:
14938 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14939 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14940 break;
14941 default:
14942 break;
14944 break;
14945 case tcc_expression:
14946 case tcc_reference:
14947 case tcc_comparison:
14948 case tcc_unary:
14949 case tcc_binary:
14950 case tcc_statement:
14951 case tcc_vl_exp:
14952 len = TREE_OPERAND_LENGTH (expr);
14953 for (i = 0; i < len; ++i)
14954 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14955 break;
14956 case tcc_declaration:
14957 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14958 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14959 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14961 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14962 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14963 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14964 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14965 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14967 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14968 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14970 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14972 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14973 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14974 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14976 break;
14977 case tcc_type:
14978 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14979 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14980 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14981 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14982 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14983 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14984 if (INTEGRAL_TYPE_P (expr)
14985 || SCALAR_FLOAT_TYPE_P (expr))
14987 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14988 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14990 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14991 if (TREE_CODE (expr) == RECORD_TYPE
14992 || TREE_CODE (expr) == UNION_TYPE
14993 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14994 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14995 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14996 break;
14997 default:
14998 break;
15002 /* Helper function for outputting the checksum of a tree T. When
15003 debugging with gdb, you can "define mynext" to be "next" followed
15004 by "call debug_fold_checksum (op0)", then just trace down till the
15005 outputs differ. */
15007 DEBUG_FUNCTION void
15008 debug_fold_checksum (const_tree t)
15010 int i;
15011 unsigned char checksum[16];
15012 struct md5_ctx ctx;
15013 hash_table <pointer_hash <tree_node> > ht;
15014 ht.create (32);
15016 md5_init_ctx (&ctx);
15017 fold_checksum_tree (t, &ctx, ht);
15018 md5_finish_ctx (&ctx, checksum);
15019 ht.empty ();
15021 for (i = 0; i < 16; i++)
15022 fprintf (stderr, "%d ", checksum[i]);
15024 fprintf (stderr, "\n");
15027 #endif
15029 /* Fold a unary tree expression with code CODE of type TYPE with an
15030 operand OP0. LOC is the location of the resulting expression.
15031 Return a folded expression if successful. Otherwise, return a tree
15032 expression with code CODE of type TYPE with an operand OP0. */
15034 tree
15035 fold_build1_stat_loc (location_t loc,
15036 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15038 tree tem;
15039 #ifdef ENABLE_FOLD_CHECKING
15040 unsigned char checksum_before[16], checksum_after[16];
15041 struct md5_ctx ctx;
15042 hash_table <pointer_hash <tree_node> > ht;
15044 ht.create (32);
15045 md5_init_ctx (&ctx);
15046 fold_checksum_tree (op0, &ctx, ht);
15047 md5_finish_ctx (&ctx, checksum_before);
15048 ht.empty ();
15049 #endif
15051 tem = fold_unary_loc (loc, code, type, op0);
15052 if (!tem)
15053 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15055 #ifdef ENABLE_FOLD_CHECKING
15056 md5_init_ctx (&ctx);
15057 fold_checksum_tree (op0, &ctx, ht);
15058 md5_finish_ctx (&ctx, checksum_after);
15059 ht.dispose ();
15061 if (memcmp (checksum_before, checksum_after, 16))
15062 fold_check_failed (op0, tem);
15063 #endif
15064 return tem;
15067 /* Fold a binary tree expression with code CODE of type TYPE with
15068 operands OP0 and OP1. LOC is the location of the resulting
15069 expression. Return a folded expression if successful. Otherwise,
15070 return a tree expression with code CODE of type TYPE with operands
15071 OP0 and OP1. */
15073 tree
15074 fold_build2_stat_loc (location_t loc,
15075 enum tree_code code, tree type, tree op0, tree op1
15076 MEM_STAT_DECL)
15078 tree tem;
15079 #ifdef ENABLE_FOLD_CHECKING
15080 unsigned char checksum_before_op0[16],
15081 checksum_before_op1[16],
15082 checksum_after_op0[16],
15083 checksum_after_op1[16];
15084 struct md5_ctx ctx;
15085 hash_table <pointer_hash <tree_node> > ht;
15087 ht.create (32);
15088 md5_init_ctx (&ctx);
15089 fold_checksum_tree (op0, &ctx, ht);
15090 md5_finish_ctx (&ctx, checksum_before_op0);
15091 ht.empty ();
15093 md5_init_ctx (&ctx);
15094 fold_checksum_tree (op1, &ctx, ht);
15095 md5_finish_ctx (&ctx, checksum_before_op1);
15096 ht.empty ();
15097 #endif
15099 tem = fold_binary_loc (loc, code, type, op0, op1);
15100 if (!tem)
15101 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15103 #ifdef ENABLE_FOLD_CHECKING
15104 md5_init_ctx (&ctx);
15105 fold_checksum_tree (op0, &ctx, ht);
15106 md5_finish_ctx (&ctx, checksum_after_op0);
15107 ht.empty ();
15109 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15110 fold_check_failed (op0, tem);
15112 md5_init_ctx (&ctx);
15113 fold_checksum_tree (op1, &ctx, ht);
15114 md5_finish_ctx (&ctx, checksum_after_op1);
15115 ht.dispose ();
15117 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15118 fold_check_failed (op1, tem);
15119 #endif
15120 return tem;
15123 /* Fold a ternary tree expression with code CODE of type TYPE with
15124 operands OP0, OP1, and OP2. Return a folded expression if
15125 successful. Otherwise, return a tree expression with code CODE of
15126 type TYPE with operands OP0, OP1, and OP2. */
15128 tree
15129 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15130 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15132 tree tem;
15133 #ifdef ENABLE_FOLD_CHECKING
15134 unsigned char checksum_before_op0[16],
15135 checksum_before_op1[16],
15136 checksum_before_op2[16],
15137 checksum_after_op0[16],
15138 checksum_after_op1[16],
15139 checksum_after_op2[16];
15140 struct md5_ctx ctx;
15141 hash_table <pointer_hash <tree_node> > ht;
15143 ht.create (32);
15144 md5_init_ctx (&ctx);
15145 fold_checksum_tree (op0, &ctx, ht);
15146 md5_finish_ctx (&ctx, checksum_before_op0);
15147 ht.empty ();
15149 md5_init_ctx (&ctx);
15150 fold_checksum_tree (op1, &ctx, ht);
15151 md5_finish_ctx (&ctx, checksum_before_op1);
15152 ht.empty ();
15154 md5_init_ctx (&ctx);
15155 fold_checksum_tree (op2, &ctx, ht);
15156 md5_finish_ctx (&ctx, checksum_before_op2);
15157 ht.empty ();
15158 #endif
15160 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15161 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15162 if (!tem)
15163 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15165 #ifdef ENABLE_FOLD_CHECKING
15166 md5_init_ctx (&ctx);
15167 fold_checksum_tree (op0, &ctx, ht);
15168 md5_finish_ctx (&ctx, checksum_after_op0);
15169 ht.empty ();
15171 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15172 fold_check_failed (op0, tem);
15174 md5_init_ctx (&ctx);
15175 fold_checksum_tree (op1, &ctx, ht);
15176 md5_finish_ctx (&ctx, checksum_after_op1);
15177 ht.empty ();
15179 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15180 fold_check_failed (op1, tem);
15182 md5_init_ctx (&ctx);
15183 fold_checksum_tree (op2, &ctx, ht);
15184 md5_finish_ctx (&ctx, checksum_after_op2);
15185 ht.dispose ();
15187 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15188 fold_check_failed (op2, tem);
15189 #endif
15190 return tem;
15193 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15194 arguments in ARGARRAY, and a null static chain.
15195 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15196 of type TYPE from the given operands as constructed by build_call_array. */
15198 tree
15199 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15200 int nargs, tree *argarray)
15202 tree tem;
15203 #ifdef ENABLE_FOLD_CHECKING
15204 unsigned char checksum_before_fn[16],
15205 checksum_before_arglist[16],
15206 checksum_after_fn[16],
15207 checksum_after_arglist[16];
15208 struct md5_ctx ctx;
15209 hash_table <pointer_hash <tree_node> > ht;
15210 int i;
15212 ht.create (32);
15213 md5_init_ctx (&ctx);
15214 fold_checksum_tree (fn, &ctx, ht);
15215 md5_finish_ctx (&ctx, checksum_before_fn);
15216 ht.empty ();
15218 md5_init_ctx (&ctx);
15219 for (i = 0; i < nargs; i++)
15220 fold_checksum_tree (argarray[i], &ctx, ht);
15221 md5_finish_ctx (&ctx, checksum_before_arglist);
15222 ht.empty ();
15223 #endif
15225 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15227 #ifdef ENABLE_FOLD_CHECKING
15228 md5_init_ctx (&ctx);
15229 fold_checksum_tree (fn, &ctx, ht);
15230 md5_finish_ctx (&ctx, checksum_after_fn);
15231 ht.empty ();
15233 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15234 fold_check_failed (fn, tem);
15236 md5_init_ctx (&ctx);
15237 for (i = 0; i < nargs; i++)
15238 fold_checksum_tree (argarray[i], &ctx, ht);
15239 md5_finish_ctx (&ctx, checksum_after_arglist);
15240 ht.dispose ();
15242 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15243 fold_check_failed (NULL_TREE, tem);
15244 #endif
15245 return tem;
15248 /* Perform constant folding and related simplification of initializer
15249 expression EXPR. These behave identically to "fold_buildN" but ignore
15250 potential run-time traps and exceptions that fold must preserve. */
15252 #define START_FOLD_INIT \
15253 int saved_signaling_nans = flag_signaling_nans;\
15254 int saved_trapping_math = flag_trapping_math;\
15255 int saved_rounding_math = flag_rounding_math;\
15256 int saved_trapv = flag_trapv;\
15257 int saved_folding_initializer = folding_initializer;\
15258 flag_signaling_nans = 0;\
15259 flag_trapping_math = 0;\
15260 flag_rounding_math = 0;\
15261 flag_trapv = 0;\
15262 folding_initializer = 1;
15264 #define END_FOLD_INIT \
15265 flag_signaling_nans = saved_signaling_nans;\
15266 flag_trapping_math = saved_trapping_math;\
15267 flag_rounding_math = saved_rounding_math;\
15268 flag_trapv = saved_trapv;\
15269 folding_initializer = saved_folding_initializer;
15271 tree
15272 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15273 tree type, tree op)
15275 tree result;
15276 START_FOLD_INIT;
15278 result = fold_build1_loc (loc, code, type, op);
15280 END_FOLD_INIT;
15281 return result;
15284 tree
15285 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15286 tree type, tree op0, tree op1)
15288 tree result;
15289 START_FOLD_INIT;
15291 result = fold_build2_loc (loc, code, type, op0, op1);
15293 END_FOLD_INIT;
15294 return result;
15297 tree
15298 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15299 tree type, tree op0, tree op1, tree op2)
15301 tree result;
15302 START_FOLD_INIT;
15304 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15306 END_FOLD_INIT;
15307 return result;
15310 tree
15311 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15312 int nargs, tree *argarray)
15314 tree result;
15315 START_FOLD_INIT;
15317 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15319 END_FOLD_INIT;
15320 return result;
15323 #undef START_FOLD_INIT
15324 #undef END_FOLD_INIT
15326 /* Determine if the first argument is a multiple of the second argument.
15327 Return 0 if it is not, or if we cannot easily determine it to be.
15329 An example of the sort of thing we care about (at this point; this routine
15330 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15331 fold cases do now) is discovering that
15333 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15335 is a multiple of
15337 SAVE_EXPR (J * 8)
15339 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15341 This code also handles discovering that
15343 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15345 is a multiple of 8 so we don't have to worry about dealing with a
15346 possible remainder.
15348 Note that we *look* inside a SAVE_EXPR only to determine how it was
15349 calculated; it is not safe for fold to do much of anything else with the
15350 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15351 at run time. For example, the latter example above *cannot* be implemented
15352 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15353 evaluation time of the original SAVE_EXPR is not necessarily the same at
15354 the time the new expression is evaluated. The only optimization of this
15355 sort that would be valid is changing
15357 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15359 divided by 8 to
15361 SAVE_EXPR (I) * SAVE_EXPR (J)
15363 (where the same SAVE_EXPR (J) is used in the original and the
15364 transformed version). */
15366 int
15367 multiple_of_p (tree type, const_tree top, const_tree bottom)
15369 if (operand_equal_p (top, bottom, 0))
15370 return 1;
15372 if (TREE_CODE (type) != INTEGER_TYPE)
15373 return 0;
15375 switch (TREE_CODE (top))
15377 case BIT_AND_EXPR:
15378 /* Bitwise and provides a power of two multiple. If the mask is
15379 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15380 if (!integer_pow2p (bottom))
15381 return 0;
15382 /* FALLTHRU */
15384 case MULT_EXPR:
15385 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15386 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15388 case PLUS_EXPR:
15389 case MINUS_EXPR:
15390 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15391 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15393 case LSHIFT_EXPR:
15394 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15396 tree op1, t1;
15398 op1 = TREE_OPERAND (top, 1);
15399 /* const_binop may not detect overflow correctly,
15400 so check for it explicitly here. */
15401 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15402 > TREE_INT_CST_LOW (op1)
15403 && TREE_INT_CST_HIGH (op1) == 0
15404 && 0 != (t1 = fold_convert (type,
15405 const_binop (LSHIFT_EXPR,
15406 size_one_node,
15407 op1)))
15408 && !TREE_OVERFLOW (t1))
15409 return multiple_of_p (type, t1, bottom);
15411 return 0;
15413 case NOP_EXPR:
15414 /* Can't handle conversions from non-integral or wider integral types. */
15415 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15416 || (TYPE_PRECISION (type)
15417 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15418 return 0;
15420 /* ... fall through ... */
15422 case SAVE_EXPR:
15423 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15425 case COND_EXPR:
15426 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15427 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15429 case INTEGER_CST:
15430 if (TREE_CODE (bottom) != INTEGER_CST
15431 || integer_zerop (bottom)
15432 || (TYPE_UNSIGNED (type)
15433 && (tree_int_cst_sgn (top) < 0
15434 || tree_int_cst_sgn (bottom) < 0)))
15435 return 0;
15436 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15437 top, bottom));
15439 default:
15440 return 0;
15444 /* Return true if CODE or TYPE is known to be non-negative. */
15446 static bool
15447 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15449 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15450 && truth_value_p (code))
15451 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15452 have a signed:1 type (where the values are -1 and 0). */
15453 return true;
15454 return false;
15457 /* Return true if (CODE OP0) is known to be non-negative. If the return
15458 value is based on the assumption that signed overflow is undefined,
15459 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15460 *STRICT_OVERFLOW_P. */
15462 bool
15463 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15464 bool *strict_overflow_p)
15466 if (TYPE_UNSIGNED (type))
15467 return true;
15469 switch (code)
15471 case ABS_EXPR:
15472 /* We can't return 1 if flag_wrapv is set because
15473 ABS_EXPR<INT_MIN> = INT_MIN. */
15474 if (!INTEGRAL_TYPE_P (type))
15475 return true;
15476 if (TYPE_OVERFLOW_UNDEFINED (type))
15478 *strict_overflow_p = true;
15479 return true;
15481 break;
15483 case NON_LVALUE_EXPR:
15484 case FLOAT_EXPR:
15485 case FIX_TRUNC_EXPR:
15486 return tree_expr_nonnegative_warnv_p (op0,
15487 strict_overflow_p);
15489 case NOP_EXPR:
15491 tree inner_type = TREE_TYPE (op0);
15492 tree outer_type = type;
15494 if (TREE_CODE (outer_type) == REAL_TYPE)
15496 if (TREE_CODE (inner_type) == REAL_TYPE)
15497 return tree_expr_nonnegative_warnv_p (op0,
15498 strict_overflow_p);
15499 if (INTEGRAL_TYPE_P (inner_type))
15501 if (TYPE_UNSIGNED (inner_type))
15502 return true;
15503 return tree_expr_nonnegative_warnv_p (op0,
15504 strict_overflow_p);
15507 else if (INTEGRAL_TYPE_P (outer_type))
15509 if (TREE_CODE (inner_type) == REAL_TYPE)
15510 return tree_expr_nonnegative_warnv_p (op0,
15511 strict_overflow_p);
15512 if (INTEGRAL_TYPE_P (inner_type))
15513 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15514 && TYPE_UNSIGNED (inner_type);
15517 break;
15519 default:
15520 return tree_simple_nonnegative_warnv_p (code, type);
15523 /* We don't know the sign of `t', so be conservative and return false. */
15524 return false;
15527 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15528 value is based on the assumption that signed overflow is undefined,
15529 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15530 *STRICT_OVERFLOW_P. */
15532 bool
15533 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15534 tree op1, bool *strict_overflow_p)
15536 if (TYPE_UNSIGNED (type))
15537 return true;
15539 switch (code)
15541 case POINTER_PLUS_EXPR:
15542 case PLUS_EXPR:
15543 if (FLOAT_TYPE_P (type))
15544 return (tree_expr_nonnegative_warnv_p (op0,
15545 strict_overflow_p)
15546 && tree_expr_nonnegative_warnv_p (op1,
15547 strict_overflow_p));
15549 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15550 both unsigned and at least 2 bits shorter than the result. */
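       /* Editorial illustration: adding two zero-extended 8-bit values
          in a 32-bit int needs at most MAX (8, 8) + 1 == 9 bits, so
          the sum cannot reach the sign bit and is known
          non-negative.  */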
15551 if (TREE_CODE (type) == INTEGER_TYPE
15552 && TREE_CODE (op0) == NOP_EXPR
15553 && TREE_CODE (op1) == NOP_EXPR)
15555 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15556 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15557 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15558 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15560 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15561 TYPE_PRECISION (inner2)) + 1;
15562 return prec < TYPE_PRECISION (type);
15565 break;
15567 case MULT_EXPR:
15568 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15570 /* x * x is always non-negative for floating point x
15571 or without overflow. */
15572 if (operand_equal_p (op0, op1, 0)
15573 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15574 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15576 if (TYPE_OVERFLOW_UNDEFINED (type))
15577 *strict_overflow_p = true;
15578 return true;
15582 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15583 both unsigned and their total precision is less than the result's. */
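       /* Editorial illustration: multiplying two zero-extended 8-bit
          values in a 32-bit int needs at most 8 + 8 == 16 bits, below
          the 32-bit precision, so the product cannot set the sign bit
          and is known non-negative.  */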
15584 if (TREE_CODE (type) == INTEGER_TYPE
15585 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15586 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15588 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15589 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15590 : TREE_TYPE (op0);
15591 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15592 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15593 : TREE_TYPE (op1);
15595 bool unsigned0 = TYPE_UNSIGNED (inner0);
15596 bool unsigned1 = TYPE_UNSIGNED (inner1);
15598 if (TREE_CODE (op0) == INTEGER_CST)
15599 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15601 if (TREE_CODE (op1) == INTEGER_CST)
15602 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15604 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15605 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15607 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15608 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15609 : TYPE_PRECISION (inner0);
15611 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15612 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15613 : TYPE_PRECISION (inner1);
15615 return precision0 + precision1 < TYPE_PRECISION (type);
15618 return false;
15620 case BIT_AND_EXPR:
15621 case MAX_EXPR:
15622 return (tree_expr_nonnegative_warnv_p (op0,
15623 strict_overflow_p)
15624 || tree_expr_nonnegative_warnv_p (op1,
15625 strict_overflow_p));
15627 case BIT_IOR_EXPR:
15628 case BIT_XOR_EXPR:
15629 case MIN_EXPR:
15630 case RDIV_EXPR:
15631 case TRUNC_DIV_EXPR:
15632 case CEIL_DIV_EXPR:
15633 case FLOOR_DIV_EXPR:
15634 case ROUND_DIV_EXPR:
15635 return (tree_expr_nonnegative_warnv_p (op0,
15636 strict_overflow_p)
15637 && tree_expr_nonnegative_warnv_p (op1,
15638 strict_overflow_p));
15640 case TRUNC_MOD_EXPR:
15641 case CEIL_MOD_EXPR:
15642 case FLOOR_MOD_EXPR:
15643 case ROUND_MOD_EXPR:
15644 return tree_expr_nonnegative_warnv_p (op0,
15645 strict_overflow_p);
15646 default:
15647 return tree_simple_nonnegative_warnv_p (code, type);
15650 /* We don't know the sign of `t', so be conservative and return false. */
15651 return false;
15654 /* Return true if T is known to be non-negative. If the return
15655 value is based on the assumption that signed overflow is undefined,
15656 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15657 *STRICT_OVERFLOW_P. */
15659 bool
15660 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15662 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15663 return true;
15665 switch (TREE_CODE (t))
15667 case INTEGER_CST:
15668 return tree_int_cst_sgn (t) >= 0;
15670 case REAL_CST:
15671 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15673 case FIXED_CST:
15674 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15676 case COND_EXPR:
15677 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15678 strict_overflow_p)
15679 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15680 strict_overflow_p));
15681 default:
15682 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15683 TREE_TYPE (t));
15685 /* We don't know the sign of `t', so be conservative and return false. */
15686 return false;
15689 /* Return true if T is known to be non-negative. If the return
15690 value is based on the assumption that signed overflow is undefined,
15691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15692 *STRICT_OVERFLOW_P. */
15694 bool
15695 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15696 tree arg0, tree arg1, bool *strict_overflow_p)
15698 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15699 switch (DECL_FUNCTION_CODE (fndecl))
15701 CASE_FLT_FN (BUILT_IN_ACOS):
15702 CASE_FLT_FN (BUILT_IN_ACOSH):
15703 CASE_FLT_FN (BUILT_IN_CABS):
15704 CASE_FLT_FN (BUILT_IN_COSH):
15705 CASE_FLT_FN (BUILT_IN_ERFC):
15706 CASE_FLT_FN (BUILT_IN_EXP):
15707 CASE_FLT_FN (BUILT_IN_EXP10):
15708 CASE_FLT_FN (BUILT_IN_EXP2):
15709 CASE_FLT_FN (BUILT_IN_FABS):
15710 CASE_FLT_FN (BUILT_IN_FDIM):
15711 CASE_FLT_FN (BUILT_IN_HYPOT):
15712 CASE_FLT_FN (BUILT_IN_POW10):
15713 CASE_INT_FN (BUILT_IN_FFS):
15714 CASE_INT_FN (BUILT_IN_PARITY):
15715 CASE_INT_FN (BUILT_IN_POPCOUNT):
15716 CASE_INT_FN (BUILT_IN_CLZ):
15717 CASE_INT_FN (BUILT_IN_CLRSB):
15718 case BUILT_IN_BSWAP32:
15719 case BUILT_IN_BSWAP64:
15720 /* Always true. */
15721 return true;
15723 CASE_FLT_FN (BUILT_IN_SQRT):
15724 /* sqrt(-0.0) is -0.0. */
15725 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15726 return true;
15727 return tree_expr_nonnegative_warnv_p (arg0,
15728 strict_overflow_p);
15730 CASE_FLT_FN (BUILT_IN_ASINH):
15731 CASE_FLT_FN (BUILT_IN_ATAN):
15732 CASE_FLT_FN (BUILT_IN_ATANH):
15733 CASE_FLT_FN (BUILT_IN_CBRT):
15734 CASE_FLT_FN (BUILT_IN_CEIL):
15735 CASE_FLT_FN (BUILT_IN_ERF):
15736 CASE_FLT_FN (BUILT_IN_EXPM1):
15737 CASE_FLT_FN (BUILT_IN_FLOOR):
15738 CASE_FLT_FN (BUILT_IN_FMOD):
15739 CASE_FLT_FN (BUILT_IN_FREXP):
15740 CASE_FLT_FN (BUILT_IN_ICEIL):
15741 CASE_FLT_FN (BUILT_IN_IFLOOR):
15742 CASE_FLT_FN (BUILT_IN_IRINT):
15743 CASE_FLT_FN (BUILT_IN_IROUND):
15744 CASE_FLT_FN (BUILT_IN_LCEIL):
15745 CASE_FLT_FN (BUILT_IN_LDEXP):
15746 CASE_FLT_FN (BUILT_IN_LFLOOR):
15747 CASE_FLT_FN (BUILT_IN_LLCEIL):
15748 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15749 CASE_FLT_FN (BUILT_IN_LLRINT):
15750 CASE_FLT_FN (BUILT_IN_LLROUND):
15751 CASE_FLT_FN (BUILT_IN_LRINT):
15752 CASE_FLT_FN (BUILT_IN_LROUND):
15753 CASE_FLT_FN (BUILT_IN_MODF):
15754 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15755 CASE_FLT_FN (BUILT_IN_RINT):
15756 CASE_FLT_FN (BUILT_IN_ROUND):
15757 CASE_FLT_FN (BUILT_IN_SCALB):
15758 CASE_FLT_FN (BUILT_IN_SCALBLN):
15759 CASE_FLT_FN (BUILT_IN_SCALBN):
15760 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15761 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15762 CASE_FLT_FN (BUILT_IN_SINH):
15763 CASE_FLT_FN (BUILT_IN_TANH):
15764 CASE_FLT_FN (BUILT_IN_TRUNC):
15765 /* True if the 1st argument is nonnegative. */
15766 return tree_expr_nonnegative_warnv_p (arg0,
15767 strict_overflow_p);
15769 CASE_FLT_FN (BUILT_IN_FMAX):
15770 /* True if either the 1st or the 2nd argument is nonnegative. */
15771 return (tree_expr_nonnegative_warnv_p (arg0,
15772 strict_overflow_p)
15773 || (tree_expr_nonnegative_warnv_p (arg1,
15774 strict_overflow_p)));
15776 CASE_FLT_FN (BUILT_IN_FMIN):
15777 /* True if the 1st AND 2nd arguments are nonnegative. */
15778 return (tree_expr_nonnegative_warnv_p (arg0,
15779 strict_overflow_p)
15780 && (tree_expr_nonnegative_warnv_p (arg1,
15781 strict_overflow_p)));
15783 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15784 /* True if the 2nd argument is nonnegative. */
15785 return tree_expr_nonnegative_warnv_p (arg1,
15786 strict_overflow_p);
15788 CASE_FLT_FN (BUILT_IN_POWI):
15789 /* True if the 1st argument is nonnegative or the second
15790 argument is an even integer. */
15791 if (TREE_CODE (arg1) == INTEGER_CST
15792 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15793 return true;
15794 return tree_expr_nonnegative_warnv_p (arg0,
15795 strict_overflow_p);
15797 CASE_FLT_FN (BUILT_IN_POW):
15798 /* True if the 1st argument is nonnegative or the second
15799 argument is an even integer-valued real. */
15800 if (TREE_CODE (arg1) == REAL_CST)
15802 REAL_VALUE_TYPE c;
15803 HOST_WIDE_INT n;
15805 c = TREE_REAL_CST (arg1);
15806 n = real_to_integer (&c);
15807 if ((n & 1) == 0)
15809 REAL_VALUE_TYPE cint;
15810 real_from_integer (&cint, VOIDmode, n,
15811 n < 0 ? -1 : 0, 0);
15812 if (real_identical (&c, &cint))
15813 return true;
15816 return tree_expr_nonnegative_warnv_p (arg0,
15817 strict_overflow_p);
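/* For example, pow (x, 2.0) is proved non-negative here because 2.0 is
   an even integer-valued real, whereas pow (x, 3.0) and pow (x, 2.5)
   fall through to asking whether x itself is known non-negative.  */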
15819 default:
15820 break;
15822 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15823 type);
15826 /* Return true if T is known to be non-negative. If the return
15827 value is based on the assumption that signed overflow is undefined,
15828 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15829 *STRICT_OVERFLOW_P. */
15831 bool
15832 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15834 enum tree_code code = TREE_CODE (t);
15835 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15836 return true;
15838 switch (code)
15840 case TARGET_EXPR:
15842 tree temp = TARGET_EXPR_SLOT (t);
15843 t = TARGET_EXPR_INITIAL (t);
15845 /* If the initializer is non-void, then it's a normal expression
15846 that will be assigned to the slot. */
15847 if (!VOID_TYPE_P (t))
15848 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15850 /* Otherwise, the initializer sets the slot in some way. One common
15851 way is an assignment statement at the end of the initializer. */
15852 while (1)
15854 if (TREE_CODE (t) == BIND_EXPR)
15855 t = expr_last (BIND_EXPR_BODY (t));
15856 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15857 || TREE_CODE (t) == TRY_CATCH_EXPR)
15858 t = expr_last (TREE_OPERAND (t, 0));
15859 else if (TREE_CODE (t) == STATEMENT_LIST)
15860 t = expr_last (t);
15861 else
15862 break;
15864 if (TREE_CODE (t) == MODIFY_EXPR
15865 && TREE_OPERAND (t, 0) == temp)
15866 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15867 strict_overflow_p);
15869 return false;
15872 case CALL_EXPR:
15874 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15875 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15877 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15878 get_callee_fndecl (t),
15879 arg0,
15880 arg1,
15881 strict_overflow_p);
15883 case COMPOUND_EXPR:
15884 case MODIFY_EXPR:
15885 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15886 strict_overflow_p);
15887 case BIND_EXPR:
15888 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15889 strict_overflow_p);
15890 case SAVE_EXPR:
15891 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15892 strict_overflow_p);
15894 default:
15895 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15896 TREE_TYPE (t));
15899 /* We don't know the sign of `t', so be conservative and return false. */
15900 return false;
15903 /* Return true if T is known to be non-negative. If the return
15904 value is based on the assumption that signed overflow is undefined,
15905 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15906 *STRICT_OVERFLOW_P. */
15908 bool
15909 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15911 enum tree_code code;
15912 if (t == error_mark_node)
15913 return false;
15915 code = TREE_CODE (t);
15916 switch (TREE_CODE_CLASS (code))
15918 case tcc_binary:
15919 case tcc_comparison:
15920 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15921 TREE_TYPE (t),
15922 TREE_OPERAND (t, 0),
15923 TREE_OPERAND (t, 1),
15924 strict_overflow_p);
15926 case tcc_unary:
15927 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15928 TREE_TYPE (t),
15929 TREE_OPERAND (t, 0),
15930 strict_overflow_p);
15932 case tcc_constant:
15933 case tcc_declaration:
15934 case tcc_reference:
15935 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15937 default:
15938 break;
15941 switch (code)
15943 case TRUTH_AND_EXPR:
15944 case TRUTH_OR_EXPR:
15945 case TRUTH_XOR_EXPR:
15946 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15947 TREE_TYPE (t),
15948 TREE_OPERAND (t, 0),
15949 TREE_OPERAND (t, 1),
15950 strict_overflow_p);
15951 case TRUTH_NOT_EXPR:
15952 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15953 TREE_TYPE (t),
15954 TREE_OPERAND (t, 0),
15955 strict_overflow_p);
15957 case COND_EXPR:
15958 case CONSTRUCTOR:
15959 case OBJ_TYPE_REF:
15960 case ASSERT_EXPR:
15961 case ADDR_EXPR:
15962 case WITH_SIZE_EXPR:
15963 case SSA_NAME:
15964 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15966 default:
15967 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15971 /* Return true if `t' is known to be non-negative. Handle warnings
15972 about undefined signed overflow. */
15974 bool
15975 tree_expr_nonnegative_p (tree t)
15977 bool ret, strict_overflow_p;
15979 strict_overflow_p = false;
15980 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15981 if (strict_overflow_p)
15982 fold_overflow_warning (("assuming signed overflow does not occur when "
15983 "determining that expression is always "
15984 "non-negative"),
15985 WARN_STRICT_OVERFLOW_MISC);
15986 return ret;
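/* A minimal usage sketch (hypothetical caller): passes that just want a
   yes/no answer, with any -Wstrict-overflow warning issued on their
   behalf, call this wrapper:

       if (tree_expr_nonnegative_p (arg))
         ...

   Callers that need to control the warning themselves use
   tree_expr_nonnegative_warnv_p and inspect *STRICT_OVERFLOW_P.  */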
15990 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15991 For floating point we further ensure that T is not denormal.
15992 Similar logic is present in nonzero_address in rtlanal.h.
15994 If the return value is based on the assumption that signed overflow
15995 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15996 change *STRICT_OVERFLOW_P. */
15998 bool
15999 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16000 bool *strict_overflow_p)
16002 switch (code)
16004 case ABS_EXPR:
16005 return tree_expr_nonzero_warnv_p (op0,
16006 strict_overflow_p);
16008 case NOP_EXPR:
16010 tree inner_type = TREE_TYPE (op0);
16011 tree outer_type = type;
16013 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16014 && tree_expr_nonzero_warnv_p (op0,
16015 strict_overflow_p));
16017 break;
16019 case NON_LVALUE_EXPR:
16020 return tree_expr_nonzero_warnv_p (op0,
16021 strict_overflow_p);
16023 default:
16024 break;
16027 return false;
16030 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16031 For floating point we further ensure that T is not denormal.
16032 Similar logic is present in nonzero_address in rtlanal.h.
16034 If the return value is based on the assumption that signed overflow
16035 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16036 change *STRICT_OVERFLOW_P. */
16038 bool
16039 tree_binary_nonzero_warnv_p (enum tree_code code,
16040 tree type,
16041 tree op0,
16042 tree op1, bool *strict_overflow_p)
16044 bool sub_strict_overflow_p;
16045 switch (code)
16047 case POINTER_PLUS_EXPR:
16048 case PLUS_EXPR:
16049 if (TYPE_OVERFLOW_UNDEFINED (type))
16051 /* In the presence of negative values it is hard
16052 to say anything. */
16053 sub_strict_overflow_p = false;
16054 if (!tree_expr_nonnegative_warnv_p (op0,
16055 &sub_strict_overflow_p)
16056 || !tree_expr_nonnegative_warnv_p (op1,
16057 &sub_strict_overflow_p))
16058 return false;
16059 /* One of the operands must be positive and the other non-negative. */
16060 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16061 overflows, on a twos-complement machine the sum of two
16062 nonnegative numbers can never be zero. */
16063 return (tree_expr_nonzero_warnv_p (op0,
16064 strict_overflow_p)
16065 || tree_expr_nonzero_warnv_p (op1,
16066 strict_overflow_p));
16068 break;
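/* For example, for signed ints known to satisfy i >= 0 and j > 0, this
   proves i + j != 0 without setting *STRICT_OVERFLOW_P: even if the
   addition wraps, the sum of two non-negative 32-bit values is at most
   2**32 - 2 and so can never wrap to exactly zero.  */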
16070 case MULT_EXPR:
16071 if (TYPE_OVERFLOW_UNDEFINED (type))
16073 if (tree_expr_nonzero_warnv_p (op0,
16074 strict_overflow_p)
16075 && tree_expr_nonzero_warnv_p (op1,
16076 strict_overflow_p))
16078 *strict_overflow_p = true;
16079 return true;
16082 break;
16084 case MIN_EXPR:
16085 sub_strict_overflow_p = false;
16086 if (tree_expr_nonzero_warnv_p (op0,
16087 &sub_strict_overflow_p)
16088 && tree_expr_nonzero_warnv_p (op1,
16089 &sub_strict_overflow_p))
16091 if (sub_strict_overflow_p)
16092 *strict_overflow_p = true;
16094 break;
16096 case MAX_EXPR:
16097 sub_strict_overflow_p = false;
16098 if (tree_expr_nonzero_warnv_p (op0,
16099 &sub_strict_overflow_p))
16101 if (sub_strict_overflow_p)
16102 *strict_overflow_p = true;
16104 /* When both operands are nonzero, then MAX must be too. */
16105 if (tree_expr_nonzero_warnv_p (op1,
16106 strict_overflow_p))
16107 return true;
16109 /* MAX where operand 0 is positive is positive. */
16110 return tree_expr_nonnegative_warnv_p (op0,
16111 strict_overflow_p);
16113 /* MAX where operand 1 is positive is positive. */
16114 else if (tree_expr_nonzero_warnv_p (op1,
16115 &sub_strict_overflow_p)
16116 && tree_expr_nonnegative_warnv_p (op1,
16117 &sub_strict_overflow_p))
16119 if (sub_strict_overflow_p)
16120 *strict_overflow_p = true;
16121 return true;
16123 break;
16125 case BIT_IOR_EXPR:
16126 return (tree_expr_nonzero_warnv_p (op1,
16127 strict_overflow_p)
16128 || tree_expr_nonzero_warnv_p (op0,
16129 strict_overflow_p));
16131 default:
16132 break;
16135 return false;
16138 /* Return true when T is an address and is known to be nonzero.
16139 For floating point we further ensure that T is not denormal.
16140 Similar logic is present in nonzero_address in rtlanal.h.
16142 If the return value is based on the assumption that signed overflow
16143 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16144 change *STRICT_OVERFLOW_P. */
16146 bool
16147 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16149 bool sub_strict_overflow_p;
16150 switch (TREE_CODE (t))
16152 case INTEGER_CST:
16153 return !integer_zerop (t);
16155 case ADDR_EXPR:
16157 tree base = TREE_OPERAND (t, 0);
16158 if (!DECL_P (base))
16159 base = get_base_address (base);
16161 if (!base)
16162 return false;
16164 /* Weak declarations may link to NULL. Other things may also be NULL
16165 so protect with -fdelete-null-pointer-checks; but not variables
16166 allocated on the stack. */
16167 if (DECL_P (base)
16168 && (flag_delete_null_pointer_checks
16169 || (DECL_CONTEXT (base)
16170 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16171 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16172 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16174 /* Constants are never weak. */
16175 if (CONSTANT_CLASS_P (base))
16176 return true;
16178 return false;
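/* For example, given

       extern int x __attribute__ ((weak));

   &x may compare equal to NULL if no definition is linked in, so it is
   rejected here, while the address of a local variable is always
   nonzero and a non-weak global's address is nonzero under
   -fdelete-null-pointer-checks.  */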
16181 case COND_EXPR:
16182 sub_strict_overflow_p = false;
16183 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16184 &sub_strict_overflow_p)
16185 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16186 &sub_strict_overflow_p))
16188 if (sub_strict_overflow_p)
16189 *strict_overflow_p = true;
16190 return true;
16192 break;
16194 default:
16195 break;
16197 return false;
16200 /* Return true when T is an address and is known to be nonzero.
16201 For floating point we further ensure that T is not denormal.
16202 Similar logic is present in nonzero_address in rtlanal.h.
16204 If the return value is based on the assumption that signed overflow
16205 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16206 change *STRICT_OVERFLOW_P. */
16208 bool
16209 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16211 tree type = TREE_TYPE (t);
16212 enum tree_code code;
16214 /* Doing something useful for floating point would need more work. */
16215 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16216 return false;
16218 code = TREE_CODE (t);
16219 switch (TREE_CODE_CLASS (code))
16221 case tcc_unary:
16222 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16223 strict_overflow_p);
16224 case tcc_binary:
16225 case tcc_comparison:
16226 return tree_binary_nonzero_warnv_p (code, type,
16227 TREE_OPERAND (t, 0),
16228 TREE_OPERAND (t, 1),
16229 strict_overflow_p);
16230 case tcc_constant:
16231 case tcc_declaration:
16232 case tcc_reference:
16233 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16235 default:
16236 break;
16239 switch (code)
16241 case TRUTH_NOT_EXPR:
16242 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16243 strict_overflow_p);
16245 case TRUTH_AND_EXPR:
16246 case TRUTH_OR_EXPR:
16247 case TRUTH_XOR_EXPR:
16248 return tree_binary_nonzero_warnv_p (code, type,
16249 TREE_OPERAND (t, 0),
16250 TREE_OPERAND (t, 1),
16251 strict_overflow_p);
16253 case COND_EXPR:
16254 case CONSTRUCTOR:
16255 case OBJ_TYPE_REF:
16256 case ASSERT_EXPR:
16257 case ADDR_EXPR:
16258 case WITH_SIZE_EXPR:
16259 case SSA_NAME:
16260 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16262 case COMPOUND_EXPR:
16263 case MODIFY_EXPR:
16264 case BIND_EXPR:
16265 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16266 strict_overflow_p);
16268 case SAVE_EXPR:
16269 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16270 strict_overflow_p);
16272 case CALL_EXPR:
16274 tree fndecl = get_callee_fndecl (t);
16275 if (!fndecl) return false;
16276 if (flag_delete_null_pointer_checks && !flag_check_new
16277 && DECL_IS_OPERATOR_NEW (fndecl)
16278 && !TREE_NOTHROW (fndecl))
16279 return true;
16280 if (flag_delete_null_pointer_checks
16281 && lookup_attribute ("returns_nonnull",
16282 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
16283 return true;
16284 return alloca_call_p (t);
16287 default:
16288 break;
16290 return false;
16293 /* Return true when T is an address and is known to be nonzero.
16294 Handle warnings about undefined signed overflow. */
16296 bool
16297 tree_expr_nonzero_p (tree t)
16299 bool ret, strict_overflow_p;
16301 strict_overflow_p = false;
16302 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16303 if (strict_overflow_p)
16304 fold_overflow_warning (("assuming signed overflow does not occur when "
16305 "determining that expression is always "
16306 "non-zero"),
16307 WARN_STRICT_OVERFLOW_MISC);
16308 return ret;
16311 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16312 attempt to fold the expression to a constant without modifying TYPE,
16313 OP0 or OP1.
16315 If the expression could be simplified to a constant, then return
16316 the constant. If the expression would not be simplified to a
16317 constant, then return NULL_TREE. */
16319 tree
16320 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16322 tree tem = fold_binary (code, type, op0, op1);
16323 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
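/* A minimal sketch (hypothetical values):

       tree t = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         build_int_cst (integer_type_node, 2),
                                         build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5, while operands that do not fold to a
   constant yield NULL_TREE rather than a partially simplified tree.  */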
16326 /* Given the components of a unary expression CODE, TYPE and OP0,
16327 attempt to fold the expression to a constant without modifying
16328 TYPE or OP0.
16330 If the expression could be simplified to a constant, then return
16331 the constant. If the expression would not be simplified to a
16332 constant, then return NULL_TREE. */
16334 tree
16335 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16337 tree tem = fold_unary (code, type, op0);
16338 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16341 /* If EXP represents referencing an element in a constant string
16342 (either via pointer arithmetic or array indexing), return the
16343 tree representing the value accessed, otherwise return NULL. */
16345 tree
16346 fold_read_from_constant_string (tree exp)
16348 if ((TREE_CODE (exp) == INDIRECT_REF
16349 || TREE_CODE (exp) == ARRAY_REF)
16350 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16352 tree exp1 = TREE_OPERAND (exp, 0);
16353 tree index;
16354 tree string;
16355 location_t loc = EXPR_LOCATION (exp);
16357 if (TREE_CODE (exp) == INDIRECT_REF)
16358 string = string_constant (exp1, &index);
16359 else
16361 tree low_bound = array_ref_low_bound (exp);
16362 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16364 /* Optimize the special case of a zero lower bound.
16366 We convert the low_bound to sizetype to avoid some problems
16367 with constant folding. (E.g. suppose the lower bound is 1,
16368 and its mode is QI. Without the conversion, (ARRAY
16369 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16370 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16371 if (! integer_zerop (low_bound))
16372 index = size_diffop_loc (loc, index,
16373 fold_convert_loc (loc, sizetype, low_bound));
16375 string = exp1;
16378 if (string
16379 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16380 && TREE_CODE (string) == STRING_CST
16381 && TREE_CODE (index) == INTEGER_CST
16382 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16383 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16384 == MODE_INT)
16385 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16386 return build_int_cst_type (TREE_TYPE (exp),
16387 (TREE_STRING_POINTER (string)
16388 [TREE_INT_CST_LOW (index)]));
16390 return NULL;
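/* For example, the ARRAY_REF "abc"[1] folds to the character constant
   'b' here, provided the index is an INTEGER_CST below
   TREE_STRING_LENGTH and the element type has a one-byte integer
   mode.  */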
16393 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16394 an integer constant, real, or fixed-point constant.
16396 TYPE is the type of the result. */
16398 static tree
16399 fold_negate_const (tree arg0, tree type)
16401 tree t = NULL_TREE;
16403 switch (TREE_CODE (arg0))
16405 case INTEGER_CST:
16407 double_int val = tree_to_double_int (arg0);
16408 bool overflow;
16409 val = val.neg_with_overflow (&overflow);
16410 t = force_fit_type_double (type, val, 1,
16411 (overflow | TREE_OVERFLOW (arg0))
16412 && !TYPE_UNSIGNED (type));
16413 break;
16416 case REAL_CST:
16417 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16418 break;
16420 case FIXED_CST:
16422 FIXED_VALUE_TYPE f;
16423 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16424 &(TREE_FIXED_CST (arg0)), NULL,
16425 TYPE_SATURATING (type));
16426 t = build_fixed (type, f);
16427 /* Propagate overflow flags. */
16428 if (overflow_p | TREE_OVERFLOW (arg0))
16429 TREE_OVERFLOW (t) = 1;
16430 break;
16433 default:
16434 gcc_unreachable ();
16437 return t;
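/* For example, negating the INTEGER_CST INT_MIN in a signed 32-bit
   type overflows (two's-complement -INT_MIN equals INT_MIN), so the
   result carries TREE_OVERFLOW; in an unsigned type the wraparound is
   well defined and no overflow flag is set.  */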
16440 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16441 an integer constant or real constant.
16443 TYPE is the type of the result. */
16445 tree
16446 fold_abs_const (tree arg0, tree type)
16448 tree t = NULL_TREE;
16450 switch (TREE_CODE (arg0))
16452 case INTEGER_CST:
16454 double_int val = tree_to_double_int (arg0);
16456 /* If the value is unsigned or non-negative, then the absolute value
16457 is the same as the ordinary value. */
16458 if (TYPE_UNSIGNED (type)
16459 || !val.is_negative ())
16460 t = arg0;
16462 /* If the value is negative, then the absolute value is
16463 its negation. */
16464 else
16466 bool overflow;
16467 val = val.neg_with_overflow (&overflow);
16468 t = force_fit_type_double (type, val, -1,
16469 overflow | TREE_OVERFLOW (arg0));
16472 break;
16474 case REAL_CST:
16475 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16476 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16477 else
16478 t = arg0;
16479 break;
16481 default:
16482 gcc_unreachable ();
16485 return t;
16488 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16489 constant. TYPE is the type of the result. */
16491 static tree
16492 fold_not_const (const_tree arg0, tree type)
16494 double_int val;
16496 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16498 val = ~tree_to_double_int (arg0);
16499 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16502 /* Given CODE, a relational operator, the target type, TYPE and two
16503 constant operands OP0 and OP1, return the result of the
16504 relational operation. If the result is not a compile time
16505 constant, then return NULL_TREE. */
16507 static tree
16508 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16510 int result, invert;
16512 /* From here on, the only cases we handle are when the result is
16513 known to be a constant. */
16515 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16517 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16518 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16520 /* Handle the cases where either operand is a NaN. */
16521 if (real_isnan (c0) || real_isnan (c1))
16523 switch (code)
16525 case EQ_EXPR:
16526 case ORDERED_EXPR:
16527 result = 0;
16528 break;
16530 case NE_EXPR:
16531 case UNORDERED_EXPR:
16532 case UNLT_EXPR:
16533 case UNLE_EXPR:
16534 case UNGT_EXPR:
16535 case UNGE_EXPR:
16536 case UNEQ_EXPR:
16537 result = 1;
16538 break;
16540 case LT_EXPR:
16541 case LE_EXPR:
16542 case GT_EXPR:
16543 case GE_EXPR:
16544 case LTGT_EXPR:
16545 if (flag_trapping_math)
16546 return NULL_TREE;
16547 result = 0;
16548 break;
16550 default:
16551 gcc_unreachable ();
16554 return constant_boolean_node (result, type);
16557 return constant_boolean_node (real_compare (code, c0, c1), type);
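/* For example, with c0 a NaN and c1 = 1.0, c0 == c1 folds to false and
   c0 != c1 to true, while an ordered comparison such as c0 < c1 is
   folded (to false) only when trapping math is disabled, since folding
   would discard the invalid-operation exception.  */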
16560 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16562 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16563 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16564 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16567 /* Handle equality/inequality of complex constants. */
16568 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16570 tree rcond = fold_relational_const (code, type,
16571 TREE_REALPART (op0),
16572 TREE_REALPART (op1));
16573 tree icond = fold_relational_const (code, type,
16574 TREE_IMAGPART (op0),
16575 TREE_IMAGPART (op1));
16576 if (code == EQ_EXPR)
16577 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16578 else if (code == NE_EXPR)
16579 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16580 else
16581 return NULL_TREE;
16584 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16586 unsigned count = VECTOR_CST_NELTS (op0);
16587 tree *elts = XALLOCAVEC (tree, count);
16588 gcc_assert (VECTOR_CST_NELTS (op1) == count
16589 && TYPE_VECTOR_SUBPARTS (type) == count);
16591 for (unsigned i = 0; i < count; i++)
16593 tree elem_type = TREE_TYPE (type);
16594 tree elem0 = VECTOR_CST_ELT (op0, i);
16595 tree elem1 = VECTOR_CST_ELT (op1, i);
16597 tree tem = fold_relational_const (code, elem_type,
16598 elem0, elem1);
16600 if (tem == NULL_TREE)
16601 return NULL_TREE;
16603 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16606 return build_vector (type, elts);
16609 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16611 To compute GT, swap the arguments and do LT.
16612 To compute GE, do LT and invert the result.
16613 To compute LE, swap the arguments, do LT and invert the result.
16614 To compute NE, do EQ and invert the result.
16616 Therefore, the code below must handle only EQ and LT. */
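/* For example, 3 >= 5 is computed as invert (3 < 5) = invert (true)
   = false, and 3 <= 5 becomes invert (5 < 3) = invert (false)
   = true.  */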
16618 if (code == LE_EXPR || code == GT_EXPR)
16620 tree tem = op0;
16621 op0 = op1;
16622 op1 = tem;
16623 code = swap_tree_comparison (code);
16626 /* Note that it is safe to invert for real values here because we
16627 have already handled the one case where it matters. */
16629 invert = 0;
16630 if (code == NE_EXPR || code == GE_EXPR)
16632 invert = 1;
16633 code = invert_tree_comparison (code, false);
16636 /* Compute a result for LT or EQ if args permit;
16637 otherwise return NULL_TREE. */
16638 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16640 if (code == EQ_EXPR)
16641 result = tree_int_cst_equal (op0, op1);
16642 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16643 result = INT_CST_LT_UNSIGNED (op0, op1);
16644 else
16645 result = INT_CST_LT (op0, op1);
16647 else
16648 return NULL_TREE;
16650 if (invert)
16651 result ^= 1;
16652 return constant_boolean_node (result, type);
16655 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16656 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16657 itself. */
16659 tree
16660 fold_build_cleanup_point_expr (tree type, tree expr)
16662 /* If the expression does not have side effects then we don't have to wrap
16663 it with a cleanup point expression. */
16664 if (!TREE_SIDE_EFFECTS (expr))
16665 return expr;
16667 /* If the expression is a return, check whether the expression inside the
16668 return, or the right-hand side of the modify expression inside the
16669 return, is free of side effects. If either is, we don't need to
16670 wrap the expression in a cleanup point expression. Note we don't check
16671 the left-hand side of the modify because it should always be a return decl. */
16672 if (TREE_CODE (expr) == RETURN_EXPR)
16674 tree op = TREE_OPERAND (expr, 0);
16675 if (!op || !TREE_SIDE_EFFECTS (op))
16676 return expr;
16677 op = TREE_OPERAND (op, 1);
16678 if (!TREE_SIDE_EFFECTS (op))
16679 return expr;
16682 return build1 (CLEANUP_POINT_EXPR, type, expr);
16685 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16686 of an indirection through OP0, or NULL_TREE if no simplification is
16687 possible. */
16689 tree
16690 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16692 tree sub = op0;
16693 tree subtype;
16695 STRIP_NOPS (sub);
16696 subtype = TREE_TYPE (sub);
16697 if (!POINTER_TYPE_P (subtype))
16698 return NULL_TREE;
16700 if (TREE_CODE (sub) == ADDR_EXPR)
16702 tree op = TREE_OPERAND (sub, 0);
16703 tree optype = TREE_TYPE (op);
16704 /* *&CONST_DECL -> to the value of the const decl. */
16705 if (TREE_CODE (op) == CONST_DECL)
16706 return DECL_INITIAL (op);
16707 /* *&p => p; make sure to handle *&"str"[cst] here. */
16708 if (type == optype)
16710 tree fop = fold_read_from_constant_string (op);
16711 if (fop)
16712 return fop;
16713 else
16714 return op;
16716 /* *(foo *)&fooarray => fooarray[0] */
16717 else if (TREE_CODE (optype) == ARRAY_TYPE
16718 && type == TREE_TYPE (optype)
16719 && (!in_gimple_form
16720 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16722 tree type_domain = TYPE_DOMAIN (optype);
16723 tree min_val = size_zero_node;
16724 if (type_domain && TYPE_MIN_VALUE (type_domain))
16725 min_val = TYPE_MIN_VALUE (type_domain);
16726 if (in_gimple_form
16727 && TREE_CODE (min_val) != INTEGER_CST)
16728 return NULL_TREE;
16729 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16730 NULL_TREE, NULL_TREE);
16732 /* *(foo *)&complexfoo => __real__ complexfoo */
16733 else if (TREE_CODE (optype) == COMPLEX_TYPE
16734 && type == TREE_TYPE (optype))
16735 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16736 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16737 else if (TREE_CODE (optype) == VECTOR_TYPE
16738 && type == TREE_TYPE (optype))
16740 tree part_width = TYPE_SIZE (type);
16741 tree index = bitsize_int (0);
16742 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16746 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16747 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16749 tree op00 = TREE_OPERAND (sub, 0);
16750 tree op01 = TREE_OPERAND (sub, 1);
16752 STRIP_NOPS (op00);
16753 if (TREE_CODE (op00) == ADDR_EXPR)
16755 tree op00type;
16756 op00 = TREE_OPERAND (op00, 0);
16757 op00type = TREE_TYPE (op00);
16759 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16760 if (TREE_CODE (op00type) == VECTOR_TYPE
16761 && type == TREE_TYPE (op00type))
16763 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16764 tree part_width = TYPE_SIZE (type);
16765 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16766 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16767 tree index = bitsize_int (indexi);
16769 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16770 return fold_build3_loc (loc,
16771 BIT_FIELD_REF, type, op00,
16772 part_width, index);
16775 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16776 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16777 && type == TREE_TYPE (op00type))
16779 tree size = TYPE_SIZE_UNIT (type);
16780 if (tree_int_cst_equal (size, op01))
16781 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16783 /* ((foo *)&fooarray)[1] => fooarray[1] */
16784 else if (TREE_CODE (op00type) == ARRAY_TYPE
16785 && type == TREE_TYPE (op00type))
16787 tree type_domain = TYPE_DOMAIN (op00type);
16788 tree min_val = size_zero_node;
16789 if (type_domain && TYPE_MIN_VALUE (type_domain))
16790 min_val = TYPE_MIN_VALUE (type_domain);
16791 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16792 TYPE_SIZE_UNIT (type));
16793 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16794 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16795 NULL_TREE, NULL_TREE);
16800 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16801 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16802 && type == TREE_TYPE (TREE_TYPE (subtype))
16803 && (!in_gimple_form
16804 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16806 tree type_domain;
16807 tree min_val = size_zero_node;
16808 sub = build_fold_indirect_ref_loc (loc, sub);
16809 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16810 if (type_domain && TYPE_MIN_VALUE (type_domain))
16811 min_val = TYPE_MIN_VALUE (type_domain);
16812 if (in_gimple_form
16813 && TREE_CODE (min_val) != INTEGER_CST)
16814 return NULL_TREE;
16815 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16816 NULL_TREE);
16819 return NULL_TREE;
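/* For example, for int a[4], *(int *) &a folds to a[0], and for
   _Complex double z, *(double *) &z folds to __real__ z while
   ((double *) &z)[1] folds to __imag__ z via the POINTER_PLUS_EXPR
   case above.  */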
16822 /* Builds an expression for an indirection through T, simplifying some
16823 cases. */
16825 tree
16826 build_fold_indirect_ref_loc (location_t loc, tree t)
16828 tree type = TREE_TYPE (TREE_TYPE (t));
16829 tree sub = fold_indirect_ref_1 (loc, type, t);
16831 if (sub)
16832 return sub;
16834 return build1_loc (loc, INDIRECT_REF, type, t);
16837 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16839 tree
16840 fold_indirect_ref_loc (location_t loc, tree t)
16842 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16844 if (sub)
16845 return sub;
16846 else
16847 return t;
16850 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16851 whose result is ignored. The type of the returned tree need not be
16852 the same as the original expression. */
16854 tree
16855 fold_ignored_result (tree t)
16857 if (!TREE_SIDE_EFFECTS (t))
16858 return integer_zero_node;
16860 for (;;)
16861 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16863 case tcc_unary:
16864 t = TREE_OPERAND (t, 0);
16865 break;
16867 case tcc_binary:
16868 case tcc_comparison:
16869 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16870 t = TREE_OPERAND (t, 0);
16871 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16872 t = TREE_OPERAND (t, 1);
16873 else
16874 return t;
16875 break;
16877 case tcc_expression:
16878 switch (TREE_CODE (t))
16880 case COMPOUND_EXPR:
16881 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16882 return t;
16883 t = TREE_OPERAND (t, 0);
16884 break;
16886 case COND_EXPR:
16887 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16888 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16889 return t;
16890 t = TREE_OPERAND (t, 0);
16891 break;
16893 default:
16894 return t;
16896 break;
16898 default:
16899 return t;
16903 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16904 This can only be applied to objects of a sizetype. */
16906 tree
16907 round_up_loc (location_t loc, tree value, int divisor)
16909 tree div = NULL_TREE;
16911 gcc_assert (divisor > 0);
16912 if (divisor == 1)
16913 return value;
16915 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16916 have to do anything. Only do this when we are not given a const,
16917 because in that case, this check is more expensive than just
16918 doing it. */
16919 if (TREE_CODE (value) != INTEGER_CST)
16921 div = build_int_cst (TREE_TYPE (value), divisor);
16923 if (multiple_of_p (TREE_TYPE (value), value, div))
16924 return value;
16927 /* If divisor is a power of two, simplify this to bit manipulation. */
16928 if (divisor == (divisor & -divisor))
16930 if (TREE_CODE (value) == INTEGER_CST)
16932 double_int val = tree_to_double_int (value);
16933 bool overflow_p;
16935 if ((val.low & (divisor - 1)) == 0)
16936 return value;
16938 overflow_p = TREE_OVERFLOW (value);
16939 val.low &= ~(divisor - 1);
16940 val.low += divisor;
16941 if (val.low == 0)
16943 val.high++;
16944 if (val.high == 0)
16945 overflow_p = true;
16948 return force_fit_type_double (TREE_TYPE (value), val,
16949 -1, overflow_p);
16951 else
16953 tree t;
16955 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16956 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16957 t = build_int_cst (TREE_TYPE (value), -divisor);
16958 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16961 else
16963 if (!div)
16964 div = build_int_cst (TREE_TYPE (value), divisor);
16965 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16966 value = size_binop_loc (loc, MULT_EXPR, value, div);
16969 return value;
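/* Worked example: rounding 37 up to a multiple of 8 takes the
   power-of-two path, (37 + 7) & -8 = 40, while a divisor of 12 takes
   the division path, ceil (37 / 12) * 12 = 4 * 12 = 48.  */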
16972 /* Likewise, but round down. */
16974 tree
16975 round_down_loc (location_t loc, tree value, int divisor)
16977 tree div = NULL_TREE;
16979 gcc_assert (divisor > 0);
16980 if (divisor == 1)
16981 return value;
16983 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16984 have to do anything. Only do this when we are not given a const,
16985 because in that case, this check is more expensive than just
16986 doing it. */
16987 if (TREE_CODE (value) != INTEGER_CST)
16989 div = build_int_cst (TREE_TYPE (value), divisor);
16991 if (multiple_of_p (TREE_TYPE (value), value, div))
16992 return value;
16995 /* If divisor is a power of two, simplify this to bit manipulation. */
16996 if (divisor == (divisor & -divisor))
16998 tree t;
17000 t = build_int_cst (TREE_TYPE (value), -divisor);
17001 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
17003 else
17005 if (!div)
17006 div = build_int_cst (TREE_TYPE (value), divisor);
17007 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
17008 value = size_binop_loc (loc, MULT_EXPR, value, div);
17011 return value;
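/* Worked example: rounding 37 down to a multiple of 8 is simply
   37 & -8 = 32; for a divisor of 12, floor (37 / 12) * 12 = 36.  */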
17014 /* Returns the pointer to the base of the object addressed by EXP and
17015 extracts the information about the offset of the access, storing it
17016 in PBITPOS and POFFSET. */
17018 static tree
17019 split_address_to_core_and_offset (tree exp,
17020 HOST_WIDE_INT *pbitpos, tree *poffset)
17022 tree core;
17023 enum machine_mode mode;
17024 int unsignedp, volatilep;
17025 HOST_WIDE_INT bitsize;
17026 location_t loc = EXPR_LOCATION (exp);
17028 if (TREE_CODE (exp) == ADDR_EXPR)
17030 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
17031 poffset, &mode, &unsignedp, &volatilep,
17032 false);
17033 core = build_fold_addr_expr_loc (loc, core);
17035 else
17037 core = exp;
17038 *pbitpos = 0;
17039 *poffset = NULL_TREE;
17042 return core;
17045 /* Returns true if addresses of E1 and E2 differ by a constant, false
17046 otherwise. If they do, E1 - E2 is stored in *DIFF. */
17048 bool
17049 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17051 tree core1, core2;
17052 HOST_WIDE_INT bitpos1, bitpos2;
17053 tree toffset1, toffset2, tdiff, type;
17055 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17056 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17058 if (bitpos1 % BITS_PER_UNIT != 0
17059 || bitpos2 % BITS_PER_UNIT != 0
17060 || !operand_equal_p (core1, core2, 0))
17061 return false;
17063 if (toffset1 && toffset2)
17065 type = TREE_TYPE (toffset1);
17066 if (type != TREE_TYPE (toffset2))
17067 toffset2 = fold_convert (type, toffset2);
17069 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17070 if (!cst_and_fits_in_hwi (tdiff))
17071 return false;
17073 *diff = int_cst_value (tdiff);
17075 else if (toffset1 || toffset2)
17077 /* If only one of the offsets is non-constant, the difference cannot
17078 be a constant. */
17079 return false;
17081 else
17082 *diff = 0;
17084 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17085 return true;
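/* For example, with int a[10] on a target with 4-byte int,
   ptr_difference_const (&a[7], &a[2], &diff) succeeds with diff = 20:
   both cores are `a' and (224 - 64) / BITS_PER_UNIT = 20 bytes.  */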
17088 /* Simplify the floating point expression EXP when the sign of the
17089 result is not significant. Return NULL_TREE if no simplification
17090 is possible. */
17092 tree
17093 fold_strip_sign_ops (tree exp)
17095 tree arg0, arg1;
17096 location_t loc = EXPR_LOCATION (exp);
17098 switch (TREE_CODE (exp))
17100 case ABS_EXPR:
17101 case NEGATE_EXPR:
17102 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17103 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17105 case MULT_EXPR:
17106 case RDIV_EXPR:
17107 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17108 return NULL_TREE;
17109 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17110 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17111 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17112 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17113 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17114 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17115 break;
17117 case COMPOUND_EXPR:
17118 arg0 = TREE_OPERAND (exp, 0);
17119 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17120 if (arg1)
17121 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17122 break;
17124 case COND_EXPR:
17125 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17126 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17127 if (arg0 || arg1)
17128 return fold_build3_loc (loc,
17129 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17130 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17131 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17132 break;
17134 case CALL_EXPR:
17136 const enum built_in_function fcode = builtin_mathfn_code (exp);
17137 switch (fcode)
17139 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17140 /* Strip copysign function call, return the 1st argument. */
17141 arg0 = CALL_EXPR_ARG (exp, 0);
17142 arg1 = CALL_EXPR_ARG (exp, 1);
17143 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17145 default:
17146 /* Strip sign ops from the argument of "odd" math functions. */
17147 if (negate_mathfn_p (fcode))
17149 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17150 if (arg0)
17151 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17153 break;
17156 break;
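/* For example, when only the magnitude of the result matters,
   -x * y strips to x * y, copysign (x, y) reduces to x (its magnitude
   is |x|), and a call to an odd function such as cbrt (-x) becomes
   cbrt (x).  */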
17158 default:
17159 break;
17161 return NULL_TREE;